From ef3bc0d21b8aaaae384b50859e217be9bb3a6ddc Mon Sep 17 00:00:00 2001 From: "instance.id" Date: Sun, 24 Jul 2022 13:37:24 -0500 Subject: [PATCH] Houdini 19.5 and python 3.9 update --- README.md | 71 +- build.ps1 | 4 +- python3.9libs/Qt.py | 1989 +++++ python3.9libs/QtPy-1.9.0.dist-info/AUTHORS.md | 16 + python3.9libs/QtPy-1.9.0.dist-info/INSTALLER | 1 + .../QtPy-1.9.0.dist-info/LICENSE.txt | 22 + python3.9libs/QtPy-1.9.0.dist-info/METADATA | 111 + python3.9libs/QtPy-1.9.0.dist-info/RECORD | 151 + python3.9libs/QtPy-1.9.0.dist-info/WHEEL | 6 + .../QtPy-1.9.0.dist-info/top_level.txt | 1 + python3.9libs/__init__.py | 2 + .../peewee-3.14.8.dist-info/INSTALLER | 1 + python3.9libs/peewee-3.14.8.dist-info/LICENSE | 19 + .../peewee-3.14.8.dist-info/METADATA | 161 + python3.9libs/peewee-3.14.8.dist-info/RECORD | 61 + .../peewee-3.14.8.dist-info/REQUESTED | 0 python3.9libs/peewee-3.14.8.dist-info/WHEEL | 5 + .../peewee-3.14.8.dist-info/top_level.txt | 3 + python3.9libs/peewee.py | 7814 +++++++++++++++++ python3.9libs/playhouse/__init__.py | 0 python3.9libs/playhouse/apsw_ext.py | 147 + python3.9libs/playhouse/cockroachdb.py | 224 + python3.9libs/playhouse/dataset.py | 454 + python3.9libs/playhouse/db_url.py | 130 + python3.9libs/playhouse/fields.py | 60 + python3.9libs/playhouse/flask_utils.py | 185 + python3.9libs/playhouse/hybrid.py | 53 + python3.9libs/playhouse/kv.py | 172 + python3.9libs/playhouse/migrate.py | 886 ++ python3.9libs/playhouse/mysql_ext.py | 90 + python3.9libs/playhouse/pool.py | 318 + python3.9libs/playhouse/postgres_ext.py | 493 ++ python3.9libs/playhouse/psycopg3_ext.py | 35 + python3.9libs/playhouse/reflection.py | 833 ++ python3.9libs/playhouse/shortcuts.py | 280 + python3.9libs/playhouse/signals.py | 79 + python3.9libs/playhouse/sqlcipher_ext.py | 106 + python3.9libs/playhouse/sqlite_changelog.py | 123 + python3.9libs/playhouse/sqlite_ext.py | 1311 +++ python3.9libs/playhouse/sqlite_udf.py | 536 ++ python3.9libs/playhouse/sqliteq.py | 331 + 
python3.9libs/playhouse/test_utils.py | 62 + python3.9libs/pythonrc.py | 24 + python3.9libs/qtpy/Qt3DAnimation.py | 26 + python3.9libs/qtpy/Qt3DCore.py | 26 + python3.9libs/qtpy/Qt3DExtras.py | 26 + python3.9libs/qtpy/Qt3DInput.py | 26 + python3.9libs/qtpy/Qt3DLogic.py | 26 + python3.9libs/qtpy/Qt3DRender.py | 26 + python3.9libs/qtpy/QtCharts.py | 22 + python3.9libs/qtpy/QtCore.py | 109 + python3.9libs/qtpy/QtDatavisualization.py | 22 + python3.9libs/qtpy/QtDesigner.py | 20 + python3.9libs/qtpy/QtGui.py | 157 + python3.9libs/qtpy/QtHelp.py | 24 + python3.9libs/qtpy/QtLocation.py | 18 + python3.9libs/qtpy/QtMultimedia.py | 17 + python3.9libs/qtpy/QtMultimediaWidgets.py | 18 + python3.9libs/qtpy/QtNetwork.py | 25 + python3.9libs/qtpy/QtOpenGL.py | 24 + python3.9libs/qtpy/QtPrintSupport.py | 28 + python3.9libs/qtpy/QtQml.py | 18 + python3.9libs/qtpy/QtQuick.py | 18 + python3.9libs/qtpy/QtQuickWidgets.py | 18 + python3.9libs/qtpy/QtSql.py | 24 + python3.9libs/qtpy/QtSvg.py | 24 + python3.9libs/qtpy/QtTest.py | 30 + python3.9libs/qtpy/QtWebChannel.py | 18 + python3.9libs/qtpy/QtWebEngineWidgets.py | 45 + python3.9libs/qtpy/QtWebSockets.py | 18 + python3.9libs/qtpy/QtWidgets.py | 133 + python3.9libs/qtpy/QtXmlPatterns.py | 22 + python3.9libs/qtpy/__init__.py | 226 + python3.9libs/qtpy/_patch/__init__.py | 0 python3.9libs/qtpy/_patch/qcombobox.py | 101 + python3.9libs/qtpy/_patch/qheaderview.py | 96 + python3.9libs/qtpy/_version.py | 2 + python3.9libs/qtpy/compat.py | 196 + python3.9libs/qtpy/py3compat.py | 261 + python3.9libs/qtpy/tests/__init__.py | 0 python3.9libs/qtpy/tests/conftest.py | 71 + python3.9libs/qtpy/tests/runtests.py | 26 + python3.9libs/qtpy/tests/test_macos_checks.py | 110 + python3.9libs/qtpy/tests/test_main.py | 82 + .../qtpy/tests/test_patch_qcombobox.py | 106 + .../qtpy/tests/test_patch_qheaderview.py | 98 + .../qtpy/tests/test_qdesktopservice_split.py | 41 + .../qtpy/tests/test_qt3danimation.py | 25 + python3.9libs/qtpy/tests/test_qt3dcore.py | 44 + 
python3.9libs/qtpy/tests/test_qt3dextras.py | 47 + python3.9libs/qtpy/tests/test_qt3dinput.py | 33 + python3.9libs/qtpy/tests/test_qt3dlogic.py | 12 + python3.9libs/qtpy/tests/test_qt3drender.py | 119 + python3.9libs/qtpy/tests/test_qtcharts.py | 11 + python3.9libs/qtpy/tests/test_qtcore.py | 18 + .../qtpy/tests/test_qtdatavisualization.py | 46 + python3.9libs/qtpy/tests/test_qtdesigner.py | 28 + python3.9libs/qtpy/tests/test_qthelp.py | 22 + python3.9libs/qtpy/tests/test_qtlocation.py | 48 + python3.9libs/qtpy/tests/test_qtmultimedia.py | 18 + .../qtpy/tests/test_qtmultimediawidgets.py | 18 + python3.9libs/qtpy/tests/test_qtnetwork.py | 43 + .../qtpy/tests/test_qtprintsupport.py | 18 + python3.9libs/qtpy/tests/test_qtqml.py | 34 + python3.9libs/qtpy/tests/test_qtquick.py | 53 + .../qtpy/tests/test_qtquickwidgets.py | 10 + python3.9libs/qtpy/tests/test_qtsql.py | 24 + python3.9libs/qtpy/tests/test_qtsvg.py | 13 + python3.9libs/qtpy/tests/test_qttest.py | 9 + python3.9libs/qtpy/tests/test_qtwebchannel.py | 13 + .../qtpy/tests/test_qtwebenginewidgets.py | 12 + python3.9libs/qtpy/tests/test_qtwebsockets.py | 15 + .../qtpy/tests/test_qtxmlpatterns.py | 25 + python3.9libs/qtpy/tests/test_uic.py | 86 + python3.9libs/qtpy/uic.py | 228 + python3.9libs/searcher/.extra/Untitled.json | 14 + python3.9libs/searcher/.extra/__init__.py | 0 python3.9libs/searcher/.extra/bug.html | 25 + python3.9libs/searcher/.extra/bugsubmit.html | 20 + python3.9libs/searcher/.extra/scratch | 324 + python3.9libs/searcher/.extra/session.py | 111 + python3.9libs/searcher/.extra/shelf.py | 55 + python3.9libs/searcher/.vscode/settings.json | 12 + python3.9libs/searcher/HelpButton.py | 61 + python3.9libs/searcher/__init__.py | 7 + python3.9libs/searcher/about.py | 115 + python3.9libs/searcher/about_ui.py | 135 + python3.9libs/searcher/animator.py | 58 + python3.9libs/searcher/bugreport.py | 102 + python3.9libs/searcher/bugreport_ui.py | 69 + python3.9libs/searcher/colorfieldselector.py | 121 + 
python3.9libs/searcher/database.py | 437 + python3.9libs/searcher/datahandler.py | 121 + python3.9libs/searcher/debugutils.py | 48 + python3.9libs/searcher/enum.py | 38 + .../JetBrainsMono-Bold-Italic.ttf | Bin 0 -> 148512 bytes .../JetBrainsMono-Bold.ttf | Bin 0 -> 141824 bytes .../JetBrainsMono-ExtraBold-Italic.ttf | Bin 0 -> 150892 bytes .../JetBrainsMono-ExtraBold.ttf | Bin 0 -> 143352 bytes .../JetBrainsMono-Italic.ttf | Bin 0 -> 143928 bytes .../JetBrainsMono-Medium-Italic.ttf | Bin 0 -> 148272 bytes .../JetBrainsMono-Medium.ttf | Bin 0 -> 141324 bytes .../JetBrainsMono-Regular.ttf | Bin 0 -> 139332 bytes .../searcher/images/collapse_all.png | Bin 0 -> 1835 bytes python3.9libs/searcher/images/expand_all.png | Bin 0 -> 1937 bytes python3.9libs/searcher/images/help.png | Bin 0 -> 2416 bytes python3.9libs/searcher/images/help1.png | Bin 0 -> 961 bytes .../searcher/images/icon_branch_closed.png | Bin 0 -> 310 bytes .../searcher/images/icon_branch_end.png | Bin 0 -> 358 bytes .../searcher/images/icon_branch_more.png | Bin 0 -> 207 bytes .../searcher/images/icon_branch_open.png | Bin 0 -> 313 bytes python3.9libs/searcher/images/icon_vline.png | Bin 0 -> 303 bytes .../searcher/images/icons/at-solid.svg | 1 + .../searcher/images/icons/bug-solid.svg | 1 + .../images/icons/firefox-browser-brands.svg | 1 + .../searcher/images/icons/github-brands.svg | 1 + .../searcher/images/icons/twitter-brands.svg | 1 + python3.9libs/searcher/images/logo.png | Bin 0 -> 46816 bytes python3.9libs/searcher/images/resizeleft.png | Bin 0 -> 105 bytes python3.9libs/searcher/images/resizeright.png | Bin 0 -> 108 bytes python3.9libs/searcher/inspect.py | 1066 +++ python3.9libs/searcher/language_en.py | 69 + python3.9libs/searcher/nodegraphhooks.py | 62 + python3.9libs/searcher/platformselect.py | 31 + python3.9libs/searcher/ptime.py | 36 + python3.9libs/searcher/searcher.py | 1685 ++++ python3.9libs/searcher/searcher_settings.py | 700 ++ .../searcher/searcher_settings_ui.py | 269 + 
python3.9libs/searcher/searcher_ui.py | 284 + python3.9libs/searcher/searchersetup.py | 465 + python3.9libs/searcher/settings_data.py | 100 + python3.9libs/searcher/style.py | 334 + python3.9libs/searcher/theme.py | 248 + python3.9libs/searcher/theme_ui.py | 298 + python3.9libs/searcher/tools/imagetint.py | 70 + .../searcher/ui_files/SearcherSettings.py | 181 + .../searcher/ui_files/SearcherSettings.ui | 334 + python3.9libs/searcher/ui_files/about.py | 73 + python3.9libs/searcher/ui_files/about.ui | 123 + .../searcher/ui_files/bugreport.bak.ui | 165 + python3.9libs/searcher/ui_files/bugreport.py | 76 + python3.9libs/searcher/ui_files/bugreport.ui | 154 + .../searcher/ui_files/searcher_ui.py | 125 + .../searcher/ui_files/searcher_ui.ui | 248 + python3.9libs/searcher/ui_files/theme_tabs.py | 288 + python3.9libs/searcher/ui_files/theme_tabs.ui | 704 ++ python3.9libs/searcher/util.py | 724 ++ python3.9libs/searcher/widgets/__init__py | 0 .../searcher/widgets/collapsedock.py | 112 + python3.9libs/typing | 1 + 190 files changed, 31742 insertions(+), 27 deletions(-) create mode 100644 python3.9libs/Qt.py create mode 100644 python3.9libs/QtPy-1.9.0.dist-info/AUTHORS.md create mode 100644 python3.9libs/QtPy-1.9.0.dist-info/INSTALLER create mode 100644 python3.9libs/QtPy-1.9.0.dist-info/LICENSE.txt create mode 100644 python3.9libs/QtPy-1.9.0.dist-info/METADATA create mode 100644 python3.9libs/QtPy-1.9.0.dist-info/RECORD create mode 100644 python3.9libs/QtPy-1.9.0.dist-info/WHEEL create mode 100644 python3.9libs/QtPy-1.9.0.dist-info/top_level.txt create mode 100644 python3.9libs/__init__.py create mode 100644 python3.9libs/peewee-3.14.8.dist-info/INSTALLER create mode 100644 python3.9libs/peewee-3.14.8.dist-info/LICENSE create mode 100644 python3.9libs/peewee-3.14.8.dist-info/METADATA create mode 100644 python3.9libs/peewee-3.14.8.dist-info/RECORD create mode 100644 python3.9libs/peewee-3.14.8.dist-info/REQUESTED create mode 100644 python3.9libs/peewee-3.14.8.dist-info/WHEEL 
create mode 100644 python3.9libs/peewee-3.14.8.dist-info/top_level.txt create mode 100644 python3.9libs/peewee.py create mode 100644 python3.9libs/playhouse/__init__.py create mode 100644 python3.9libs/playhouse/apsw_ext.py create mode 100644 python3.9libs/playhouse/cockroachdb.py create mode 100644 python3.9libs/playhouse/dataset.py create mode 100644 python3.9libs/playhouse/db_url.py create mode 100644 python3.9libs/playhouse/fields.py create mode 100644 python3.9libs/playhouse/flask_utils.py create mode 100644 python3.9libs/playhouse/hybrid.py create mode 100644 python3.9libs/playhouse/kv.py create mode 100644 python3.9libs/playhouse/migrate.py create mode 100644 python3.9libs/playhouse/mysql_ext.py create mode 100644 python3.9libs/playhouse/pool.py create mode 100644 python3.9libs/playhouse/postgres_ext.py create mode 100644 python3.9libs/playhouse/psycopg3_ext.py create mode 100644 python3.9libs/playhouse/reflection.py create mode 100644 python3.9libs/playhouse/shortcuts.py create mode 100644 python3.9libs/playhouse/signals.py create mode 100644 python3.9libs/playhouse/sqlcipher_ext.py create mode 100644 python3.9libs/playhouse/sqlite_changelog.py create mode 100644 python3.9libs/playhouse/sqlite_ext.py create mode 100644 python3.9libs/playhouse/sqlite_udf.py create mode 100644 python3.9libs/playhouse/sqliteq.py create mode 100644 python3.9libs/playhouse/test_utils.py create mode 100644 python3.9libs/pythonrc.py create mode 100644 python3.9libs/qtpy/Qt3DAnimation.py create mode 100644 python3.9libs/qtpy/Qt3DCore.py create mode 100644 python3.9libs/qtpy/Qt3DExtras.py create mode 100644 python3.9libs/qtpy/Qt3DInput.py create mode 100644 python3.9libs/qtpy/Qt3DLogic.py create mode 100644 python3.9libs/qtpy/Qt3DRender.py create mode 100644 python3.9libs/qtpy/QtCharts.py create mode 100644 python3.9libs/qtpy/QtCore.py create mode 100644 python3.9libs/qtpy/QtDatavisualization.py create mode 100644 python3.9libs/qtpy/QtDesigner.py create mode 100644 
python3.9libs/qtpy/QtGui.py create mode 100644 python3.9libs/qtpy/QtHelp.py create mode 100644 python3.9libs/qtpy/QtLocation.py create mode 100644 python3.9libs/qtpy/QtMultimedia.py create mode 100644 python3.9libs/qtpy/QtMultimediaWidgets.py create mode 100644 python3.9libs/qtpy/QtNetwork.py create mode 100644 python3.9libs/qtpy/QtOpenGL.py create mode 100644 python3.9libs/qtpy/QtPrintSupport.py create mode 100644 python3.9libs/qtpy/QtQml.py create mode 100644 python3.9libs/qtpy/QtQuick.py create mode 100644 python3.9libs/qtpy/QtQuickWidgets.py create mode 100644 python3.9libs/qtpy/QtSql.py create mode 100644 python3.9libs/qtpy/QtSvg.py create mode 100644 python3.9libs/qtpy/QtTest.py create mode 100644 python3.9libs/qtpy/QtWebChannel.py create mode 100644 python3.9libs/qtpy/QtWebEngineWidgets.py create mode 100644 python3.9libs/qtpy/QtWebSockets.py create mode 100644 python3.9libs/qtpy/QtWidgets.py create mode 100644 python3.9libs/qtpy/QtXmlPatterns.py create mode 100644 python3.9libs/qtpy/__init__.py create mode 100644 python3.9libs/qtpy/_patch/__init__.py create mode 100644 python3.9libs/qtpy/_patch/qcombobox.py create mode 100644 python3.9libs/qtpy/_patch/qheaderview.py create mode 100644 python3.9libs/qtpy/_version.py create mode 100644 python3.9libs/qtpy/compat.py create mode 100644 python3.9libs/qtpy/py3compat.py create mode 100644 python3.9libs/qtpy/tests/__init__.py create mode 100644 python3.9libs/qtpy/tests/conftest.py create mode 100644 python3.9libs/qtpy/tests/runtests.py create mode 100644 python3.9libs/qtpy/tests/test_macos_checks.py create mode 100644 python3.9libs/qtpy/tests/test_main.py create mode 100644 python3.9libs/qtpy/tests/test_patch_qcombobox.py create mode 100644 python3.9libs/qtpy/tests/test_patch_qheaderview.py create mode 100644 python3.9libs/qtpy/tests/test_qdesktopservice_split.py create mode 100644 python3.9libs/qtpy/tests/test_qt3danimation.py create mode 100644 python3.9libs/qtpy/tests/test_qt3dcore.py create mode 100644 
python3.9libs/qtpy/tests/test_qt3dextras.py create mode 100644 python3.9libs/qtpy/tests/test_qt3dinput.py create mode 100644 python3.9libs/qtpy/tests/test_qt3dlogic.py create mode 100644 python3.9libs/qtpy/tests/test_qt3drender.py create mode 100644 python3.9libs/qtpy/tests/test_qtcharts.py create mode 100644 python3.9libs/qtpy/tests/test_qtcore.py create mode 100644 python3.9libs/qtpy/tests/test_qtdatavisualization.py create mode 100644 python3.9libs/qtpy/tests/test_qtdesigner.py create mode 100644 python3.9libs/qtpy/tests/test_qthelp.py create mode 100644 python3.9libs/qtpy/tests/test_qtlocation.py create mode 100644 python3.9libs/qtpy/tests/test_qtmultimedia.py create mode 100644 python3.9libs/qtpy/tests/test_qtmultimediawidgets.py create mode 100644 python3.9libs/qtpy/tests/test_qtnetwork.py create mode 100644 python3.9libs/qtpy/tests/test_qtprintsupport.py create mode 100644 python3.9libs/qtpy/tests/test_qtqml.py create mode 100644 python3.9libs/qtpy/tests/test_qtquick.py create mode 100644 python3.9libs/qtpy/tests/test_qtquickwidgets.py create mode 100644 python3.9libs/qtpy/tests/test_qtsql.py create mode 100644 python3.9libs/qtpy/tests/test_qtsvg.py create mode 100644 python3.9libs/qtpy/tests/test_qttest.py create mode 100644 python3.9libs/qtpy/tests/test_qtwebchannel.py create mode 100644 python3.9libs/qtpy/tests/test_qtwebenginewidgets.py create mode 100644 python3.9libs/qtpy/tests/test_qtwebsockets.py create mode 100644 python3.9libs/qtpy/tests/test_qtxmlpatterns.py create mode 100644 python3.9libs/qtpy/tests/test_uic.py create mode 100644 python3.9libs/qtpy/uic.py create mode 100644 python3.9libs/searcher/.extra/Untitled.json create mode 100644 python3.9libs/searcher/.extra/__init__.py create mode 100644 python3.9libs/searcher/.extra/bug.html create mode 100644 python3.9libs/searcher/.extra/bugsubmit.html create mode 100644 python3.9libs/searcher/.extra/scratch create mode 100644 python3.9libs/searcher/.extra/session.py create mode 100644 
python3.9libs/searcher/.extra/shelf.py create mode 100644 python3.9libs/searcher/.vscode/settings.json create mode 100644 python3.9libs/searcher/HelpButton.py create mode 100644 python3.9libs/searcher/__init__.py create mode 100644 python3.9libs/searcher/about.py create mode 100644 python3.9libs/searcher/about_ui.py create mode 100644 python3.9libs/searcher/animator.py create mode 100644 python3.9libs/searcher/bugreport.py create mode 100644 python3.9libs/searcher/bugreport_ui.py create mode 100644 python3.9libs/searcher/colorfieldselector.py create mode 100644 python3.9libs/searcher/database.py create mode 100644 python3.9libs/searcher/datahandler.py create mode 100644 python3.9libs/searcher/debugutils.py create mode 100644 python3.9libs/searcher/enum.py create mode 100644 python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Bold-Italic.ttf create mode 100644 python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Bold.ttf create mode 100644 python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-ExtraBold-Italic.ttf create mode 100644 python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-ExtraBold.ttf create mode 100644 python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Italic.ttf create mode 100644 python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Medium-Italic.ttf create mode 100644 python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Medium.ttf create mode 100644 python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Regular.ttf create mode 100644 python3.9libs/searcher/images/collapse_all.png create mode 100644 python3.9libs/searcher/images/expand_all.png create mode 100644 python3.9libs/searcher/images/help.png create mode 100644 python3.9libs/searcher/images/help1.png create mode 100644 python3.9libs/searcher/images/icon_branch_closed.png create mode 100644 python3.9libs/searcher/images/icon_branch_end.png create mode 100644 python3.9libs/searcher/images/icon_branch_more.png create 
mode 100644 python3.9libs/searcher/images/icon_branch_open.png create mode 100644 python3.9libs/searcher/images/icon_vline.png create mode 100644 python3.9libs/searcher/images/icons/at-solid.svg create mode 100644 python3.9libs/searcher/images/icons/bug-solid.svg create mode 100644 python3.9libs/searcher/images/icons/firefox-browser-brands.svg create mode 100644 python3.9libs/searcher/images/icons/github-brands.svg create mode 100644 python3.9libs/searcher/images/icons/twitter-brands.svg create mode 100644 python3.9libs/searcher/images/logo.png create mode 100644 python3.9libs/searcher/images/resizeleft.png create mode 100644 python3.9libs/searcher/images/resizeright.png create mode 100644 python3.9libs/searcher/inspect.py create mode 100644 python3.9libs/searcher/language_en.py create mode 100644 python3.9libs/searcher/nodegraphhooks.py create mode 100644 python3.9libs/searcher/platformselect.py create mode 100644 python3.9libs/searcher/ptime.py create mode 100644 python3.9libs/searcher/searcher.py create mode 100644 python3.9libs/searcher/searcher_settings.py create mode 100644 python3.9libs/searcher/searcher_settings_ui.py create mode 100644 python3.9libs/searcher/searcher_ui.py create mode 100644 python3.9libs/searcher/searchersetup.py create mode 100644 python3.9libs/searcher/settings_data.py create mode 100644 python3.9libs/searcher/style.py create mode 100644 python3.9libs/searcher/theme.py create mode 100644 python3.9libs/searcher/theme_ui.py create mode 100644 python3.9libs/searcher/tools/imagetint.py create mode 100644 python3.9libs/searcher/ui_files/SearcherSettings.py create mode 100644 python3.9libs/searcher/ui_files/SearcherSettings.ui create mode 100644 python3.9libs/searcher/ui_files/about.py create mode 100644 python3.9libs/searcher/ui_files/about.ui create mode 100644 python3.9libs/searcher/ui_files/bugreport.bak.ui create mode 100644 python3.9libs/searcher/ui_files/bugreport.py create mode 100644 python3.9libs/searcher/ui_files/bugreport.ui 
create mode 100644 python3.9libs/searcher/ui_files/searcher_ui.py create mode 100644 python3.9libs/searcher/ui_files/searcher_ui.ui create mode 100644 python3.9libs/searcher/ui_files/theme_tabs.py create mode 100644 python3.9libs/searcher/ui_files/theme_tabs.ui create mode 100644 python3.9libs/searcher/util.py create mode 100644 python3.9libs/searcher/widgets/__init__py create mode 100644 python3.9libs/searcher/widgets/collapsedock.py create mode 160000 python3.9libs/typing diff --git a/README.md b/README.md index 4aceb35..0486b31 100644 --- a/README.md +++ b/README.md @@ -9,17 +9,32 @@ ### Currently only *fully* working on Windows and Linux. If there is interest in a working Mac version, let me know. #### [Installation](#install) | [Compatability Details](#notes) + --- + Thanks for checking out Searcher. Below are the instructions to get you up and running. ## Help Docs + https://help.instance.id/searcher/ -## Note for Houdini 18.5+ -SideFX has only included SQLite v 3.31.0 with H18.5 and their support has told me they have no plans to upgrade it to 3.33.0 (which has FTS5 enabled (Full-Text Search), which is needed by Searcher). Because of this, an extra step is required to install/use Searcher with Houdini 18.5+ until/unless they decide to include SQLite 3.33.0 instead of 3.31.0. +--- + +## Change Log: + +### v0.1.3 + +Added Houdini v19.5 compatibility + +--- + +## Note for Houdini 18.5-19.5 (Windows) + +SideFX has only included SQLite v 3.31.0 with H18.5+ and their support has told me they have no plans to upgrade it to 3.33.0 (which has FTS5 enabled (Full-Text Search), which is needed by Searcher). Because of this, an extra step is required to install/use Searcher with Houdini 18.5+ until/unless they decide to include SQLite 3.33.0 instead of 3.31.0. 
Download: Windows x64: [SQLite v3.33.0](https://www.sqlite.org/2020/sqlite-dll-win64-x64-3330000.zip) + - Extract the downloaded sqlite-dll-win64-x64-3330000.zip file, then in another window browse to your Houdini installation directory: aka `$HFS/bin`. By default this is located at: `C:\Program Files\SideFX\Houdini19.0.xxx\bin` - In the Houdini $HFS/bin folder, locate the `sqlite3.dll` file and either make a backup copy to save elsewhere (just in case), or simply rename it to `sqlite3.dll.bak` @@ -33,37 +48,42 @@ Linux: [SQLite v3.33.0](https://www.sqlite.org/2020/sqlite-tools-linux-x86-33300 MacOS: [SQLite v3.33.0](https://www.sqlite.org/2020/sqlite-tools-osx-x86-3330000.zip) --- + #### Install 1. Extract/unzip 'Searcher_\.zip'. (ex. Searcher_{#version}.zip) Inside will be a packages and Searcher folder as well as a README.md and a url link to this page. - ``` - Searcher_{#version}.zip / - Searcher_{#version}/__ /packages/ - |_ /Searcher/ - |__ README.md - |__ Searcher_install_instructions.url + + ``` + Searcher_{#version}.zip / + Searcher_{#version}/__ /packages/ + |_ /Searcher/ + |__ README.md + |__ Searcher_install_instructions.url ``` 2. Move the Searcher folder somewhere permanent. It can be placed where ever you would like. Make note of the folder path, as it will be needed in a later step. Examples below: - | OS | Path (replace \ with your actual username) | - | -------------------------------------------- | ------------------------------------------------- | - | Windows: | C:\Users\\\\houdini_addons\Searcher | - | Linux: | /home/\/houdini_addons/Searcher | - | MacOS: | /Users/\/Library/h_addons/Searcher | + | OS | Path (replace \ with your actual username) | + | -------------------------------------------- | ------------------------------------------------- | + | Windows: | C:\Users\\\\houdini_addons\Searcher | + | Linux: | /home/\/houdini_addons/Searcher | + | MacOS: | /Users/\/Library/h_addons/Searcher | 3. 
Move the 'packages' folder into your Houdini $HOME directory. The locations are seen below: - | OS | Path (replace \ with your actual username) | - | -------------------------------------------- | ---------------------------------------------------------------------- | - | Windows: | C:\Users\\\\Documents\houdini18.0\packages\Searcher.json | - | Linux: | /home/\/houdini18.0/packages/Searcher.json | - | MacOS: | /Users/\/Library/Preferences/houdini/18.0/packages/Searcher.json | + + | OS | Path (replace \ with your actual username) | + | -------------------------------------------- | ---------------------------------------------------------------------- | + | Windows: | C:\Users\\\\Documents\houdini18.0\packages\Searcher.json | + | Linux: | /home/\/houdini18.0/packages/Searcher.json | + | MacOS: | /Users/\/Library/Preferences/houdini/18.0/packages/Searcher.json | 4. Within the 'packages' folder is the Searcher.json file. Open this file in your editor of choice and edit line #27. Within the second set of quotation marks input the path to the Searcher folder from step #2. On Windows, replace the backslashes (\\) in the path with a forwardslashs (/) -Ex. If using Windows and following the example listed above, line 27 would look like this: - ``` - "SEARCHERLOCATION": "C:/Users//houdini_addons/Searcher", - ``` + Ex. If using Windows and following the example listed above, line 27 would look like this: + + ``` + "SEARCHERLOCATION": "C:/Users//houdini_addons/Searcher", + ``` + 5. Save the file and start Houdini. On the main shelf toolbar add the Searcher shelf by clicking on the plus(+) button, then the Shelves tab, followed by selecting "Searcher Shelf" seen in the images below: ![](https://i.imgur.com/GzdyUYt.png) ![](https://i.imgur.com/F4C5MOx.png) @@ -80,12 +100,11 @@ Ex. 
If using Windows and following the example listed above, line 27 would look | ![](https://i.imgur.com/h9Nefqz.png) Houdini: | Version: 18.0.348 and up | ##### Tested versions + | OS | Versions Tested | | ---------- | -------------------------------------- | | ⊞ Windows: | Windows 10 Pro v10.0.19041.264 (v2004) | -| 🐧 Linux: | Pop_OS! (Ubuntu) 19.04/19.10/20.04 | -| 🍎 MacOS: | Possibly coming soon, if requested | - - +| 🐧 Linux: | Pop_OS! (Ubuntu) 19.04/19.10/20.04 | +| 🍎 MacOS: | Possibly coming soon, if requested | [website](https://instance.id/) | [twitter](https://twitter.com/instance_id) | [github](https://github.com/instance-id) | [issues](https://github.com/instance-id/searcher_addon/issues?q=) | [email](https://github.com/instance-id/searcher_addon/issues?q=) diff --git a/build.ps1 b/build.ps1 index 5f64ba2..a859c97 100644 --- a/build.ps1 +++ b/build.ps1 @@ -16,17 +16,18 @@ if ($Version) { $date = Get-Date -Format 'yyyy-MM-dd_HH-mm-ss' $exclude = '--exclude-from=build/exclude.excl' $include = '--include-from=build/include.incl' -$config = '--config=C:/Users/mosthated/.backup/rclone.conf' $cmd = '' $source1 = '' $destination1 = '' if ($IsWindows) { + $config = '--config=C:/Users/mosthated/.backup/rclone.conf' $cmd = 'C:\files\rclone\rclone.exe' $source1 = 'E:\GitHub\Searcher\' $destination1 = 'E:\Searcher' $log1 = "--log-file=C:\files\rclone\logs\Searcher_Build_$date.log" } elseif ($IsLinux) { + $config = '--config=/home/mosthated/.config/rclone/rclone.conf' $cmd = 'rclone' $source1 = '/mnt/x/GitHub/instance-id/1_Projects/Searcher' $destination1 = '/mnt/x/_dev/Searcher' @@ -71,6 +72,7 @@ if ($Version) { Move-Item -Path $destination1\help -Destination $folderVer\$searcher\help Move-Item -Path $destination1\python2.7libs -Destination $folderVer\$searcher\python2.7libs Move-Item -Path $destination1\python3.7libs -Destination $folderVer\$searcher\python3.7libs + Move-Item -Path $destination1\python3.9libs -Destination $folderVer\$searcher\python3.9libs Move-Item 
-Path $destination1\toolbar -Destination $folderVer\$searcher\toolbar $listfiles = Get-ChildItem $folderVer -Recurse -File -Include '*.md', '*.txt' diff --git a/python3.9libs/Qt.py b/python3.9libs/Qt.py new file mode 100644 index 0000000..fe4b45f --- /dev/null +++ b/python3.9libs/Qt.py @@ -0,0 +1,1989 @@ +"""Minimal Python 2 & 3 shim around all Qt bindings + +DOCUMENTATION + Qt.py was born in the film and visual effects industry to address + the growing need for the development of software capable of running + with more than one flavour of the Qt bindings for Python - PySide, + PySide2, PyQt4 and PyQt5. + + 1. Build for one, run with all + 2. Explicit is better than implicit + 3. Support co-existence + + Default resolution order: + - PySide2 + - PyQt5 + - PySide + - PyQt4 + + Usage: + >> import sys + >> from Qt import QtWidgets + >> app = QtWidgets.QApplication(sys.argv) + >> button = QtWidgets.QPushButton("Hello World") + >> button.show() + >> app.exec_() + + All members of PySide2 are mapped from other bindings, should they exist. + If no equivalent member exist, it is excluded from Qt.py and inaccessible. + The idea is to highlight members that exist across all supported binding, + and guarantee that code that runs on one binding runs on all others. + + For more details, visit https://github.com/mottosso/Qt.py + +LICENSE + + See end of file for license (MIT, BSD) information. 
+ +""" + +import os +import sys +import types +import shutil +import importlib + + +__version__ = "1.2.3" + +# Enable support for `from Qt import *` +__all__ = [] + +# Flags from environment variables +QT_VERBOSE = bool(os.getenv("QT_VERBOSE")) +QT_PREFERRED_BINDING = os.getenv("QT_PREFERRED_BINDING", "") +QT_SIP_API_HINT = os.getenv("QT_SIP_API_HINT") + +# Reference to Qt.py +Qt = sys.modules[__name__] +Qt.QtCompat = types.ModuleType("QtCompat") + +try: + long +except NameError: + # Python 3 compatibility + long = int + + +"""Common members of all bindings + +This is where each member of Qt.py is explicitly defined. +It is based on a "lowest common denominator" of all bindings; +including members found in each of the 4 bindings. + +The "_common_members" dictionary is generated using the +build_membership.sh script. + +""" + +_common_members = { + "QtCore": [ + "QAbstractAnimation", + "QAbstractEventDispatcher", + "QAbstractItemModel", + "QAbstractListModel", + "QAbstractState", + "QAbstractTableModel", + "QAbstractTransition", + "QAnimationGroup", + "QBasicTimer", + "QBitArray", + "QBuffer", + "QByteArray", + "QByteArrayMatcher", + "QChildEvent", + "QCoreApplication", + "QCryptographicHash", + "QDataStream", + "QDate", + "QDateTime", + "QDir", + "QDirIterator", + "QDynamicPropertyChangeEvent", + "QEasingCurve", + "QElapsedTimer", + "QEvent", + "QEventLoop", + "QEventTransition", + "QFile", + "QFileInfo", + "QFileSystemWatcher", + "QFinalState", + "QGenericArgument", + "QGenericReturnArgument", + "QHistoryState", + "QItemSelectionRange", + "QIODevice", + "QLibraryInfo", + "QLine", + "QLineF", + "QLocale", + "QMargins", + "QMetaClassInfo", + "QMetaEnum", + "QMetaMethod", + "QMetaObject", + "QMetaProperty", + "QMimeData", + "QModelIndex", + "QMutex", + "QMutexLocker", + "QObject", + "QParallelAnimationGroup", + "QPauseAnimation", + "QPersistentModelIndex", + "QPluginLoader", + "QPoint", + "QPointF", + "QProcess", + "QProcessEnvironment", + "QPropertyAnimation", + 
"QReadLocker", + "QReadWriteLock", + "QRect", + "QRectF", + "QRegExp", + "QResource", + "QRunnable", + "QSemaphore", + "QSequentialAnimationGroup", + "QSettings", + "QSignalMapper", + "QSignalTransition", + "QSize", + "QSizeF", + "QSocketNotifier", + "QState", + "QStateMachine", + "QSysInfo", + "QSystemSemaphore", + "QT_TRANSLATE_NOOP", + "QT_TR_NOOP", + "QT_TR_NOOP_UTF8", + "QTemporaryFile", + "QTextBoundaryFinder", + "QTextCodec", + "QTextDecoder", + "QTextEncoder", + "QTextStream", + "QTextStreamManipulator", + "QThread", + "QThreadPool", + "QTime", + "QTimeLine", + "QTimer", + "QTimerEvent", + "QTranslator", + "QUrl", + "QVariantAnimation", + "QWaitCondition", + "QWriteLocker", + "QXmlStreamAttribute", + "QXmlStreamAttributes", + "QXmlStreamEntityDeclaration", + "QXmlStreamEntityResolver", + "QXmlStreamNamespaceDeclaration", + "QXmlStreamNotationDeclaration", + "QXmlStreamReader", + "QXmlStreamWriter", + "Qt", + "QtCriticalMsg", + "QtDebugMsg", + "QtFatalMsg", + "QtMsgType", + "QtSystemMsg", + "QtWarningMsg", + "qAbs", + "qAddPostRoutine", + "qChecksum", + "qCritical", + "qDebug", + "qFatal", + "qFuzzyCompare", + "qIsFinite", + "qIsInf", + "qIsNaN", + "qIsNull", + "qRegisterResourceData", + "qUnregisterResourceData", + "qVersion", + "qWarning", + "qrand", + "qsrand" + ], + "QtGui": [ + "QAbstractTextDocumentLayout", + "QActionEvent", + "QBitmap", + "QBrush", + "QClipboard", + "QCloseEvent", + "QColor", + "QConicalGradient", + "QContextMenuEvent", + "QCursor", + "QDesktopServices", + "QDoubleValidator", + "QDrag", + "QDragEnterEvent", + "QDragLeaveEvent", + "QDragMoveEvent", + "QDropEvent", + "QFileOpenEvent", + "QFocusEvent", + "QFont", + "QFontDatabase", + "QFontInfo", + "QFontMetrics", + "QFontMetricsF", + "QGradient", + "QHelpEvent", + "QHideEvent", + "QHoverEvent", + "QIcon", + "QIconDragEvent", + "QIconEngine", + "QImage", + "QImageIOHandler", + "QImageReader", + "QImageWriter", + "QInputEvent", + "QInputMethodEvent", + "QIntValidator", + "QKeyEvent", + 
"QKeySequence", + "QLinearGradient", + "QMatrix2x2", + "QMatrix2x3", + "QMatrix2x4", + "QMatrix3x2", + "QMatrix3x3", + "QMatrix3x4", + "QMatrix4x2", + "QMatrix4x3", + "QMatrix4x4", + "QMouseEvent", + "QMoveEvent", + "QMovie", + "QPaintDevice", + "QPaintEngine", + "QPaintEngineState", + "QPaintEvent", + "QPainter", + "QPainterPath", + "QPainterPathStroker", + "QPalette", + "QPen", + "QPicture", + "QPictureIO", + "QPixmap", + "QPixmapCache", + "QPolygon", + "QPolygonF", + "QQuaternion", + "QRadialGradient", + "QRegExpValidator", + "QRegion", + "QResizeEvent", + "QSessionManager", + "QShortcutEvent", + "QShowEvent", + "QStandardItem", + "QStandardItemModel", + "QStatusTipEvent", + "QSyntaxHighlighter", + "QTabletEvent", + "QTextBlock", + "QTextBlockFormat", + "QTextBlockGroup", + "QTextBlockUserData", + "QTextCharFormat", + "QTextCursor", + "QTextDocument", + "QTextDocumentFragment", + "QTextFormat", + "QTextFragment", + "QTextFrame", + "QTextFrameFormat", + "QTextImageFormat", + "QTextInlineObject", + "QTextItem", + "QTextLayout", + "QTextLength", + "QTextLine", + "QTextList", + "QTextListFormat", + "QTextObject", + "QTextObjectInterface", + "QTextOption", + "QTextTable", + "QTextTableCell", + "QTextTableCellFormat", + "QTextTableFormat", + "QTouchEvent", + "QTransform", + "QValidator", + "QVector2D", + "QVector3D", + "QVector4D", + "QWhatsThisClickedEvent", + "QWheelEvent", + "QWindowStateChangeEvent", + "qAlpha", + "qBlue", + "qGray", + "qGreen", + "qIsGray", + "qRed", + "qRgb", + "qRgba" + ], + "QtHelp": [ + "QHelpContentItem", + "QHelpContentModel", + "QHelpContentWidget", + "QHelpEngine", + "QHelpEngineCore", + "QHelpIndexModel", + "QHelpIndexWidget", + "QHelpSearchEngine", + "QHelpSearchQuery", + "QHelpSearchQueryWidget", + "QHelpSearchResultWidget" + ], + "QtMultimedia": [ + "QAbstractVideoBuffer", + "QAbstractVideoSurface", + "QAudio", + "QAudioDeviceInfo", + "QAudioFormat", + "QAudioInput", + "QAudioOutput", + "QVideoFrame", + "QVideoSurfaceFormat" + ], + 
"QtNetwork": [ + "QAbstractNetworkCache", + "QAbstractSocket", + "QAuthenticator", + "QHostAddress", + "QHostInfo", + "QLocalServer", + "QLocalSocket", + "QNetworkAccessManager", + "QNetworkAddressEntry", + "QNetworkCacheMetaData", + "QNetworkConfiguration", + "QNetworkConfigurationManager", + "QNetworkCookie", + "QNetworkCookieJar", + "QNetworkDiskCache", + "QNetworkInterface", + "QNetworkProxy", + "QNetworkProxyFactory", + "QNetworkProxyQuery", + "QNetworkReply", + "QNetworkRequest", + "QNetworkSession", + "QSsl", + "QTcpServer", + "QTcpSocket", + "QUdpSocket" + ], + "QtOpenGL": [ + "QGL", + "QGLContext", + "QGLFormat", + "QGLWidget" + ], + "QtPrintSupport": [ + "QAbstractPrintDialog", + "QPageSetupDialog", + "QPrintDialog", + "QPrintEngine", + "QPrintPreviewDialog", + "QPrintPreviewWidget", + "QPrinter", + "QPrinterInfo" + ], + "QtSql": [ + "QSql", + "QSqlDatabase", + "QSqlDriver", + "QSqlDriverCreatorBase", + "QSqlError", + "QSqlField", + "QSqlIndex", + "QSqlQuery", + "QSqlQueryModel", + "QSqlRecord", + "QSqlRelation", + "QSqlRelationalDelegate", + "QSqlRelationalTableModel", + "QSqlResult", + "QSqlTableModel" + ], + "QtSvg": [ + "QGraphicsSvgItem", + "QSvgGenerator", + "QSvgRenderer", + "QSvgWidget" + ], + "QtTest": [ + "QTest" + ], + "QtWidgets": [ + "QAbstractButton", + "QAbstractGraphicsShapeItem", + "QAbstractItemDelegate", + "QAbstractItemView", + "QAbstractScrollArea", + "QAbstractSlider", + "QAbstractSpinBox", + "QAction", + "QActionGroup", + "QApplication", + "QBoxLayout", + "QButtonGroup", + "QCalendarWidget", + "QCheckBox", + "QColorDialog", + "QColumnView", + "QComboBox", + "QCommandLinkButton", + "QCommonStyle", + "QCompleter", + "QDataWidgetMapper", + "QDateEdit", + "QDateTimeEdit", + "QDesktopWidget", + "QDial", + "QDialog", + "QDialogButtonBox", + "QDirModel", + "QDockWidget", + "QDoubleSpinBox", + "QErrorMessage", + "QFileDialog", + "QFileIconProvider", + "QFileSystemModel", + "QFocusFrame", + "QFontComboBox", + "QFontDialog", + "QFormLayout", 
+ "QFrame", + "QGesture", + "QGestureEvent", + "QGestureRecognizer", + "QGraphicsAnchor", + "QGraphicsAnchorLayout", + "QGraphicsBlurEffect", + "QGraphicsColorizeEffect", + "QGraphicsDropShadowEffect", + "QGraphicsEffect", + "QGraphicsEllipseItem", + "QGraphicsGridLayout", + "QGraphicsItem", + "QGraphicsItemGroup", + "QGraphicsLayout", + "QGraphicsLayoutItem", + "QGraphicsLineItem", + "QGraphicsLinearLayout", + "QGraphicsObject", + "QGraphicsOpacityEffect", + "QGraphicsPathItem", + "QGraphicsPixmapItem", + "QGraphicsPolygonItem", + "QGraphicsProxyWidget", + "QGraphicsRectItem", + "QGraphicsRotation", + "QGraphicsScale", + "QGraphicsScene", + "QGraphicsSceneContextMenuEvent", + "QGraphicsSceneDragDropEvent", + "QGraphicsSceneEvent", + "QGraphicsSceneHelpEvent", + "QGraphicsSceneHoverEvent", + "QGraphicsSceneMouseEvent", + "QGraphicsSceneMoveEvent", + "QGraphicsSceneResizeEvent", + "QGraphicsSceneWheelEvent", + "QGraphicsSimpleTextItem", + "QGraphicsTextItem", + "QGraphicsTransform", + "QGraphicsView", + "QGraphicsWidget", + "QGridLayout", + "QGroupBox", + "QHBoxLayout", + "QHeaderView", + "QInputDialog", + "QItemDelegate", + "QItemEditorCreatorBase", + "QItemEditorFactory", + "QKeyEventTransition", + "QLCDNumber", + "QLabel", + "QLayout", + "QLayoutItem", + "QLineEdit", + "QListView", + "QListWidget", + "QListWidgetItem", + "QMainWindow", + "QMdiArea", + "QMdiSubWindow", + "QMenu", + "QMenuBar", + "QMessageBox", + "QMouseEventTransition", + "QPanGesture", + "QPinchGesture", + "QPlainTextDocumentLayout", + "QPlainTextEdit", + "QProgressBar", + "QProgressDialog", + "QPushButton", + "QRadioButton", + "QRubberBand", + "QScrollArea", + "QScrollBar", + "QShortcut", + "QSizeGrip", + "QSizePolicy", + "QSlider", + "QSpacerItem", + "QSpinBox", + "QSplashScreen", + "QSplitter", + "QSplitterHandle", + "QStackedLayout", + "QStackedWidget", + "QStatusBar", + "QStyle", + "QStyleFactory", + "QStyleHintReturn", + "QStyleHintReturnMask", + "QStyleHintReturnVariant", + "QStyleOption", 
+ "QStyleOptionButton", + "QStyleOptionComboBox", + "QStyleOptionComplex", + "QStyleOptionDockWidget", + "QStyleOptionFocusRect", + "QStyleOptionFrame", + "QStyleOptionGraphicsItem", + "QStyleOptionGroupBox", + "QStyleOptionHeader", + "QStyleOptionMenuItem", + "QStyleOptionProgressBar", + "QStyleOptionRubberBand", + "QStyleOptionSizeGrip", + "QStyleOptionSlider", + "QStyleOptionSpinBox", + "QStyleOptionTab", + "QStyleOptionTabBarBase", + "QStyleOptionTabWidgetFrame", + "QStyleOptionTitleBar", + "QStyleOptionToolBar", + "QStyleOptionToolBox", + "QStyleOptionToolButton", + "QStyleOptionViewItem", + "QStylePainter", + "QStyledItemDelegate", + "QSwipeGesture", + "QSystemTrayIcon", + "QTabBar", + "QTabWidget", + "QTableView", + "QTableWidget", + "QTableWidgetItem", + "QTableWidgetSelectionRange", + "QTapAndHoldGesture", + "QTapGesture", + "QTextBrowser", + "QTextEdit", + "QTimeEdit", + "QToolBar", + "QToolBox", + "QToolButton", + "QToolTip", + "QTreeView", + "QTreeWidget", + "QTreeWidgetItem", + "QTreeWidgetItemIterator", + "QUndoCommand", + "QUndoGroup", + "QUndoStack", + "QUndoView", + "QVBoxLayout", + "QWhatsThis", + "QWidget", + "QWidgetAction", + "QWidgetItem", + "QWizard", + "QWizardPage" + ], + "QtX11Extras": [ + "QX11Info" + ], + "QtXml": [ + "QDomAttr", + "QDomCDATASection", + "QDomCharacterData", + "QDomComment", + "QDomDocument", + "QDomDocumentFragment", + "QDomDocumentType", + "QDomElement", + "QDomEntity", + "QDomEntityReference", + "QDomImplementation", + "QDomNamedNodeMap", + "QDomNode", + "QDomNodeList", + "QDomNotation", + "QDomProcessingInstruction", + "QDomText", + "QXmlAttributes", + "QXmlContentHandler", + "QXmlDTDHandler", + "QXmlDeclHandler", + "QXmlDefaultHandler", + "QXmlEntityResolver", + "QXmlErrorHandler", + "QXmlInputSource", + "QXmlLexicalHandler", + "QXmlLocator", + "QXmlNamespaceSupport", + "QXmlParseException", + "QXmlReader", + "QXmlSimpleReader" + ], + "QtXmlPatterns": [ + "QAbstractMessageHandler", + "QAbstractUriResolver", + 
"QAbstractXmlNodeModel", + "QAbstractXmlReceiver", + "QSourceLocation", + "QXmlFormatter", + "QXmlItem", + "QXmlName", + "QXmlNamePool", + "QXmlNodeModelIndex", + "QXmlQuery", + "QXmlResultItems", + "QXmlSchema", + "QXmlSchemaValidator", + "QXmlSerializer" + ] +} + +""" Missing members + +This mapping describes members that have been deprecated +in one or more bindings and have been left out of the +_common_members mapping. + +The member can provide an extra details string to be +included in exceptions and warnings. +""" + +_missing_members = { + "QtGui": { + "QMatrix": "Deprecated in PyQt5", + }, +} + + +def _qInstallMessageHandler(handler): + """Install a message handler that works in all bindings + + Args: + handler: A function that takes 3 arguments, or None + """ + def messageOutputHandler(*args): + # In Qt4 bindings, message handlers are passed 2 arguments + # In Qt5 bindings, message handlers are passed 3 arguments + # The first argument is a QtMsgType + # The last argument is the message to be printed + # The Middle argument (if passed) is a QMessageLogContext + if len(args) == 3: + msgType, logContext, msg = args + elif len(args) == 2: + msgType, msg = args + logContext = None + else: + raise TypeError( + "handler expected 2 or 3 arguments, got {0}".format(len(args))) + + if isinstance(msg, bytes): + # In python 3, some bindings pass a bytestring, which cannot be + # used elsewhere. Decoding a python 2 or 3 bytestring object will + # consistently return a unicode object. 
+ msg = msg.decode() + + handler(msgType, logContext, msg) + + passObject = messageOutputHandler if handler else handler + if Qt.IsPySide or Qt.IsPyQt4: + return Qt._QtCore.qInstallMsgHandler(passObject) + elif Qt.IsPySide2 or Qt.IsPyQt5: + return Qt._QtCore.qInstallMessageHandler(passObject) + + +def _getcpppointer(object): + if hasattr(Qt, "_shiboken2"): + return getattr(Qt, "_shiboken2").getCppPointer(object)[0] + elif hasattr(Qt, "_shiboken"): + return getattr(Qt, "_shiboken").getCppPointer(object)[0] + elif hasattr(Qt, "_sip"): + return getattr(Qt, "_sip").unwrapinstance(object) + raise AttributeError("'module' has no attribute 'getCppPointer'") + + +def _wrapinstance(ptr, base=None): + """Enable implicit cast of pointer to most suitable class + + This behaviour is available in sip per default. + + Based on http://nathanhorne.com/pyqtpyside-wrap-instance + + Usage: + This mechanism kicks in under these circumstances. + 1. Qt.py is using PySide 1 or 2. + 2. A `base` argument is not provided. + + See :func:`QtCompat.wrapInstance()` + + Arguments: + ptr (long): Pointer to QObject in memory + base (QObject, optional): Base class to wrap with. Defaults to QObject, + which should handle anything. 
+ + """ + + assert isinstance(ptr, long), "Argument 'ptr' must be of type " + assert (base is None) or issubclass(base, Qt.QtCore.QObject), ( + "Argument 'base' must be of type ") + + if Qt.IsPyQt4 or Qt.IsPyQt5: + func = getattr(Qt, "_sip").wrapinstance + elif Qt.IsPySide2: + func = getattr(Qt, "_shiboken2").wrapInstance + elif Qt.IsPySide: + func = getattr(Qt, "_shiboken").wrapInstance + else: + raise AttributeError("'module' has no attribute 'wrapInstance'") + + if base is None: + q_object = func(long(ptr), Qt.QtCore.QObject) + meta_object = q_object.metaObject() + class_name = meta_object.className() + super_class_name = meta_object.superClass().className() + + if hasattr(Qt.QtWidgets, class_name): + base = getattr(Qt.QtWidgets, class_name) + + elif hasattr(Qt.QtWidgets, super_class_name): + base = getattr(Qt.QtWidgets, super_class_name) + + else: + base = Qt.QtCore.QObject + + return func(long(ptr), base) + + +def _isvalid(object): + """Check if the object is valid to use in Python runtime. + + Usage: + See :func:`QtCompat.isValid()` + + Arguments: + object (QObject): QObject to check the validity of. 
+ + """ + + assert isinstance(object, Qt.QtCore.QObject) + + if hasattr(Qt, "_shiboken2"): + return getattr(Qt, "_shiboken2").isValid(object) + + elif hasattr(Qt, "_shiboken"): + return getattr(Qt, "_shiboken").isValid(object) + + elif hasattr(Qt, "_sip"): + return not getattr(Qt, "_sip").isdeleted(object) + + else: + raise AttributeError("'module' has no attribute isValid") + + +def _translate(context, sourceText, *args): + # In Qt4 bindings, translate can be passed 2 or 3 arguments + # In Qt5 bindings, translate can be passed 2 arguments + # The first argument is disambiguation[str] + # The last argument is n[int] + # The middle argument can be encoding[QtCore.QCoreApplication.Encoding] + if len(args) == 3: + disambiguation, encoding, n = args + elif len(args) == 2: + disambiguation, n = args + encoding = None + else: + raise TypeError( + "Expected 4 or 5 arguments, got {0}.".format(len(args) + 2)) + + if hasattr(Qt.QtCore, "QCoreApplication"): + app = getattr(Qt.QtCore, "QCoreApplication") + else: + raise NotImplementedError( + "Missing QCoreApplication implementation for {binding}".format( + binding=Qt.__binding__, + ) + ) + if Qt.__binding__ in ("PySide2", "PyQt5"): + sanitized_args = [context, sourceText, disambiguation, n] + else: + sanitized_args = [ + context, + sourceText, + disambiguation, + encoding or app.CodecForTr, + n + ] + return app.translate(*sanitized_args) + + +def _loadUi(uifile, baseinstance=None): + """Dynamically load a user interface from the given `uifile` + + This function calls `uic.loadUi` if using PyQt bindings, + else it implements a comparable binding for PySide. + + Documentation: + http://pyqt.sourceforge.net/Docs/PyQt5/designer.html#PyQt5.uic.loadUi + + Arguments: + uifile (str): Absolute path to Qt Designer file. + baseinstance (QWidget): Instantiated QWidget or subclass thereof + + Return: + baseinstance if `baseinstance` is not `None`. Otherwise + return the newly created instance of the user interface. 

    """
    if hasattr(Qt, "_uic"):
        # PyQt: delegate straight to the binding's own loader.
        return Qt._uic.loadUi(uifile, baseinstance)

    elif hasattr(Qt, "_QtUiTools"):
        # Implement `PyQt5.uic.loadUi` for PySide(2)

        class _UiLoader(Qt._QtUiTools.QUiLoader):
            """Create the user interface in a base instance.

            Unlike `Qt._QtUiTools.QUiLoader` itself this class does not
            create a new instance of the top-level widget, but creates the user
            interface in an existing instance of the top-level class if needed.

            This mimics the behaviour of `PyQt5.uic.loadUi`.

            """

            def __init__(self, baseinstance):
                super(_UiLoader, self).__init__(baseinstance)
                self.baseinstance = baseinstance
                # class name -> Python class, filled by _loadCustomWidgets()
                self.custom_widgets = {}

            def _loadCustomWidgets(self, etree):
                """
                Workaround to pyside-77 bug.

                From QUiLoader doc we should use registerCustomWidget method.
                But this causes a segfault on some platforms.

                Instead we fetch from customwidgets DOM node the python class
                objects. Then we can directly use them in createWidget method.
                """

                def headerToModule(header):
                    """
                    Translate a header file to python module path
                    foo/bar.h => foo.bar
                    """
                    # Remove header extension
                    module = os.path.splitext(header)[0]

                    # Replace os separator by python module separator
                    return module.replace("/", ".").replace("\\", ".")

                custom_widgets = etree.find("customwidgets")

                if custom_widgets is None:
                    return

                for custom_widget in custom_widgets:
                    class_name = custom_widget.find("class").text
                    header = custom_widget.find("header").text
                    module = importlib.import_module(headerToModule(header))
                    self.custom_widgets[class_name] = getattr(module,
                                                              class_name)

            def load(self, uifile, *args, **kwargs):
                from xml.etree.ElementTree import ElementTree

                # For whatever reason, if this doesn't happen then
                # reading an invalid or non-existing .ui file throws
                # a RuntimeError.
                etree = ElementTree()
                etree.parse(uifile)
                self._loadCustomWidgets(etree)

                widget = Qt._QtUiTools.QUiLoader.load(
                    self, uifile, *args, **kwargs)

                # Workaround for PySide 1.0.9, see issue #208
                widget.parentWidget()

                return widget

            def createWidget(self, class_name, parent=None, name=""):
                """Called for each widget defined in ui file

                Overridden here to populate `baseinstance` instead.

                """

                if parent is None and self.baseinstance:
                    # Supposed to create the top-level widget,
                    # return the base instance instead
                    return self.baseinstance

                # For some reason, Line is not in the list of available
                # widgets, but works fine, so we have to special case it here.
                if class_name in self.availableWidgets() + ["Line"]:
                    # Create a new widget for child widgets
                    widget = Qt._QtUiTools.QUiLoader.createWidget(self,
                                                                  class_name,
                                                                  parent,
                                                                  name)
                elif class_name in self.custom_widgets:
                    widget = self.custom_widgets[class_name](parent)
                else:
                    raise Exception("Custom widget '%s' not supported"
                                    % class_name)

                if self.baseinstance:
                    # Set an attribute for the new child widget on the base
                    # instance, just like PyQt5.uic.loadUi does.
                    setattr(self.baseinstance, name, widget)

                return widget

        widget = _UiLoader(baseinstance).load(uifile)
        Qt.QtCore.QMetaObject.connectSlotsByName(widget)

        return widget

    else:
        raise NotImplementedError("No implementation available for loadUi")


"""Misplaced members

These members from the original submodule are misplaced relative to PySide2

"""
# Maps "module.member" as found in a binding to where Qt.py exposes it.
# A plain-string value is the target "module.member"; a [target, object]
# list installs `object` (one of the _* compatibility shims above) at
# `target`.  Consumed by _reassign_misplaced_members().
_misplaced_members = {
    "PySide2": {
        "QtCore.QStringListModel": "QtCore.QStringListModel",
        "QtGui.QStringListModel": "QtCore.QStringListModel",
        "QtCore.Property": "QtCore.Property",
        "QtCore.Signal": "QtCore.Signal",
        "QtCore.Slot": "QtCore.Slot",
        "QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
        "QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
        "QtCore.QItemSelection": "QtCore.QItemSelection",
        "QtCore.QItemSelectionModel": "QtCore.QItemSelectionModel",
        "QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange",
        "QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi],
        "shiboken2.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance],
        "shiboken2.getCppPointer": ["QtCompat.getCppPointer", _getcpppointer],
        "shiboken2.isValid": ["QtCompat.isValid", _isvalid],
        "QtWidgets.qApp": "QtWidgets.QApplication.instance()",
        "QtCore.QCoreApplication.translate": [
            "QtCompat.translate", _translate
        ],
        "QtWidgets.QApplication.translate": [
            "QtCompat.translate", _translate
        ],
        "QtCore.qInstallMessageHandler": [
            "QtCompat.qInstallMessageHandler", _qInstallMessageHandler
        ],
        "QtWidgets.QStyleOptionViewItem": "QtCompat.QStyleOptionViewItemV4",
    },
    "PyQt5": {
        "QtCore.pyqtProperty": "QtCore.Property",
        "QtCore.pyqtSignal": "QtCore.Signal",
        "QtCore.pyqtSlot": "QtCore.Slot",
        "QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
        "QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
        "QtCore.QStringListModel": "QtCore.QStringListModel",
        "QtCore.QItemSelection": "QtCore.QItemSelection",
        "QtCore.QItemSelectionModel":
"QtCore.QItemSelectionModel", + "QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange", + "uic.loadUi": ["QtCompat.loadUi", _loadUi], + "sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance], + "sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer], + "sip.isdeleted": ["QtCompat.isValid", _isvalid], + "QtWidgets.qApp": "QtWidgets.QApplication.instance()", + "QtCore.QCoreApplication.translate": [ + "QtCompat.translate", _translate + ], + "QtWidgets.QApplication.translate": [ + "QtCompat.translate", _translate + ], + "QtCore.qInstallMessageHandler": [ + "QtCompat.qInstallMessageHandler", _qInstallMessageHandler + ], + "QtWidgets.QStyleOptionViewItem": "QtCompat.QStyleOptionViewItemV4", + }, + "PySide": { + "QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel", + "QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel", + "QtGui.QStringListModel": "QtCore.QStringListModel", + "QtGui.QItemSelection": "QtCore.QItemSelection", + "QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel", + "QtCore.Property": "QtCore.Property", + "QtCore.Signal": "QtCore.Signal", + "QtCore.Slot": "QtCore.Slot", + "QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange", + "QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog", + "QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog", + "QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog", + "QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine", + "QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog", + "QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget", + "QtGui.QPrinter": "QtPrintSupport.QPrinter", + "QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo", + "QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi], + "shiboken.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance], + "shiboken.unwrapInstance": ["QtCompat.getCppPointer", _getcpppointer], + "shiboken.isValid": ["QtCompat.isValid", _isvalid], + "QtGui.qApp": "QtWidgets.QApplication.instance()", + 
"QtCore.QCoreApplication.translate": [ + "QtCompat.translate", _translate + ], + "QtGui.QApplication.translate": [ + "QtCompat.translate", _translate + ], + "QtCore.qInstallMsgHandler": [ + "QtCompat.qInstallMessageHandler", _qInstallMessageHandler + ], + "QtGui.QStyleOptionViewItemV4": "QtCompat.QStyleOptionViewItemV4", + }, + "PyQt4": { + "QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel", + "QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel", + "QtGui.QItemSelection": "QtCore.QItemSelection", + "QtGui.QStringListModel": "QtCore.QStringListModel", + "QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel", + "QtCore.pyqtProperty": "QtCore.Property", + "QtCore.pyqtSignal": "QtCore.Signal", + "QtCore.pyqtSlot": "QtCore.Slot", + "QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange", + "QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog", + "QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog", + "QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog", + "QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine", + "QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog", + "QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget", + "QtGui.QPrinter": "QtPrintSupport.QPrinter", + "QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo", + # "QtCore.pyqtSignature": "QtCore.Slot", + "uic.loadUi": ["QtCompat.loadUi", _loadUi], + "sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance], + "sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer], + "sip.isdeleted": ["QtCompat.isValid", _isvalid], + "QtCore.QString": "str", + "QtGui.qApp": "QtWidgets.QApplication.instance()", + "QtCore.QCoreApplication.translate": [ + "QtCompat.translate", _translate + ], + "QtGui.QApplication.translate": [ + "QtCompat.translate", _translate + ], + "QtCore.qInstallMsgHandler": [ + "QtCompat.qInstallMessageHandler", _qInstallMessageHandler + ], + "QtGui.QStyleOptionViewItemV4": "QtCompat.QStyleOptionViewItemV4", + } +} + +""" 
Compatibility Members + +This dictionary is used to build Qt.QtCompat objects that provide a consistent +interface for obsolete members, and differences in binding return values. + +{ + "binding": { + "classname": { + "targetname": "binding_namespace", + } + } +} +""" +_compatibility_members = { + "PySide2": { + "QWidget": { + "grab": "QtWidgets.QWidget.grab", + }, + "QHeaderView": { + "sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable", + "setSectionsClickable": + "QtWidgets.QHeaderView.setSectionsClickable", + "sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode", + "setSectionResizeMode": + "QtWidgets.QHeaderView.setSectionResizeMode", + "sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable", + "setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable", + }, + "QFileDialog": { + "getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName", + "getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames", + "getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName", + }, + }, + "PyQt5": { + "QWidget": { + "grab": "QtWidgets.QWidget.grab", + }, + "QHeaderView": { + "sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable", + "setSectionsClickable": + "QtWidgets.QHeaderView.setSectionsClickable", + "sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode", + "setSectionResizeMode": + "QtWidgets.QHeaderView.setSectionResizeMode", + "sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable", + "setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable", + }, + "QFileDialog": { + "getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName", + "getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames", + "getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName", + }, + }, + "PySide": { + "QWidget": { + "grab": "QtWidgets.QPixmap.grabWidget", + }, + "QHeaderView": { + "sectionsClickable": "QtWidgets.QHeaderView.isClickable", + "setSectionsClickable": "QtWidgets.QHeaderView.setClickable", + "sectionResizeMode": 
"QtWidgets.QHeaderView.resizeMode", + "setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode", + "sectionsMovable": "QtWidgets.QHeaderView.isMovable", + "setSectionsMovable": "QtWidgets.QHeaderView.setMovable", + }, + "QFileDialog": { + "getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName", + "getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames", + "getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName", + }, + }, + "PyQt4": { + "QWidget": { + "grab": "QtWidgets.QPixmap.grabWidget", + }, + "QHeaderView": { + "sectionsClickable": "QtWidgets.QHeaderView.isClickable", + "setSectionsClickable": "QtWidgets.QHeaderView.setClickable", + "sectionResizeMode": "QtWidgets.QHeaderView.resizeMode", + "setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode", + "sectionsMovable": "QtWidgets.QHeaderView.isMovable", + "setSectionsMovable": "QtWidgets.QHeaderView.setMovable", + }, + "QFileDialog": { + "getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName", + "getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames", + "getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName", + }, + }, +} + + +def _apply_site_config(): + try: + import QtSiteConfig + except ImportError: + # If no QtSiteConfig module found, no modifications + # to _common_members are needed. + pass + else: + # Provide the ability to modify the dicts used to build Qt.py + if hasattr(QtSiteConfig, 'update_members'): + QtSiteConfig.update_members(_common_members) + + if hasattr(QtSiteConfig, 'update_misplaced_members'): + QtSiteConfig.update_misplaced_members(members=_misplaced_members) + + if hasattr(QtSiteConfig, 'update_compatibility_members'): + QtSiteConfig.update_compatibility_members( + members=_compatibility_members) + + +def _new_module(name): + return types.ModuleType(__name__ + "." + name) + + +def _import_sub_module(module, name): + """import_sub_module will mimic the function of importlib.import_module""" + module = __import__(module.__name__ + "." 
                        + name)
    for level in name.split("."):
        module = getattr(module, level)
    return module


def _setup(module, extras):
    """Install common submodules"""

    Qt.__binding__ = module.__name__

    for name in list(_common_members) + extras:
        try:
            submodule = _import_sub_module(
                module, name)
        except ImportError:
            try:
                # For extra modules like sip and shiboken that may not be
                # children of the binding.
                submodule = __import__(name)
            except ImportError:
                continue

        # Original binding module is kept under a "_"-prefixed name.
        setattr(Qt, "_" + name, submodule)

        if name not in extras:
            # Store reference to original binding,
            # but don't store speciality modules
            # such as uic or QtUiTools
            setattr(Qt, name, _new_module(name))


def _reassign_misplaced_members(binding):
    """Apply misplaced members from `binding` to Qt.py

    Arguments:
        binding (str): Binding name, a key of `_misplaced_members`.

    """

    for src, dst in _misplaced_members[binding].items():
        dst_value = None

        src_parts = src.split(".")
        src_module = src_parts[0]
        src_member = None
        if len(src_parts) > 1:
            # NOTE: a list of the remaining attribute path components.
            src_member = src_parts[1:]

        if isinstance(dst, (list, tuple)):
            # [target, object] form: the shim object is installed directly.
            dst, dst_value = dst

        dst_parts = dst.split(".")
        dst_module = dst_parts[0]
        dst_member = None
        if len(dst_parts) > 1:
            dst_member = dst_parts[1]

        # Get the member we want to store in the namespace.
        if not dst_value:
            try:
                _part = getattr(Qt, "_" + src_module)
                # Walk the attribute path; this empties src_member.
                while src_member:
                    member = src_member.pop(0)
                    _part = getattr(_part, member)
                dst_value = _part
            except AttributeError:
                # If the member we want to store in the namespace does not
                # exist, there is no need to continue. This can happen if a
                # request was made to rename a member that didn't exist, for
                # example if QtWidgets isn't available on the target platform.
                _log("Misplaced member has no source: {0}".format(src))
                continue

        try:
            src_object = getattr(Qt, dst_module)
        except AttributeError:
            if dst_module not in _common_members:
                # Only create the Qt parent module if its listed in
                # _common_members. Without this check, if you remove QtCore
                # from _common_members, the default _misplaced_members will add
                # Qt.QtCore so it can add Signal, Slot, etc.
                msg = 'Not creating missing member module "{m}" for "{c}"'
                _log(msg.format(m=dst_module, c=dst_member))
                continue
            # If the dst is valid but the Qt parent module does not exist
            # then go ahead and create a new module to contain the member.
            setattr(Qt, dst_module, _new_module(dst_module))
            src_object = getattr(Qt, dst_module)
            # Enable direct import of the new module
            sys.modules[__name__ + "." + dst_module] = src_object

        if not dst_value:
            # NOTE(review): this fallback looks unreachable in practice (the
            # first `if not dst_value` either sets dst_value or continues),
            # and `src_member` is a list here, so getattr would raise
            # TypeError if hit with a member path — confirm before relying
            # on this branch.
            dst_value = getattr(Qt, "_" + src_module)
            if src_member:
                dst_value = getattr(dst_value, src_member)

        setattr(
            src_object,
            dst_member or dst_module,
            dst_value
        )


def _build_compatibility_members(binding, decorators=None):
    """Apply `binding` to QtCompat

    Arguments:
        binding (str): Top level binding in _compatibility_members.
        decorators (dict, optional): Provides the ability to decorate the
            original Qt methods when needed by a binding. This can be used
            to change the returned value to a standard value. The key should
            be the classname, the value is a dict where the keys are the
            target method names, and the values are the decorator functions.

    """

    decorators = decorators or dict()

    # Allow optional site-level customization of the compatibility members.
    # This method does not need to be implemented in QtSiteConfig.
    try:
        import QtSiteConfig
    except ImportError:
        pass
    else:
        if hasattr(QtSiteConfig, 'update_compatibility_decorators'):
            QtSiteConfig.update_compatibility_decorators(binding, decorators)

    _QtCompat = type("QtCompat", (object,), {})

    for classname, bindings in _compatibility_members[binding].items():
        attrs = {}
        # NOTE(review): the loop variable below shadows the `binding`
        # parameter; harmless here since the parameter is not read again,
        # but rename on the next behavioral change.
        for target, binding in bindings.items():
            namespaces = binding.split('.')
            try:
                src_object = getattr(Qt, "_" + namespaces[0])
            except AttributeError as e:
                _log("QtCompat: AttributeError: %s" % e)
                # Skip reassignment of non-existing members.
                # This can happen if a request was made to
                # rename a member that didn't exist, for example
                # if QtWidgets isn't available on the target platform.
                continue

            # Walk down any remaining namespace getting the object assuming
            # that if the first namespace exists the rest will exist.
            for namespace in namespaces[1:]:
                src_object = getattr(src_object, namespace)

            # decorate the Qt method if a decorator was provided.
            if target in decorators.get(classname, []):
                # staticmethod must be called on the decorated method to
                # prevent a TypeError being raised when the decorated method
                # is called.
                src_object = staticmethod(
                    decorators[classname][target](src_object))

            attrs[target] = src_object

        # Create the QtCompat class and install it into the namespace
        compat_class = type(classname, (_QtCompat,), attrs)
        setattr(Qt.QtCompat, classname, compat_class)


def _pyside2():
    """Initialise PySide2

    These functions serve to test the existence of a binding
    along with set it up in such a way that it aligns with
    the final step; adding members from the original binding
    to Qt.py

    """

    import PySide2 as module
    extras = ["QtUiTools"]
    try:
        try:
            # Before merge of PySide and shiboken
            import shiboken2
        except ImportError:
            # After merge of PySide and shiboken, May 2017
            from PySide2 import shiboken2
        extras.append("shiboken2")
    except ImportError:
        pass

    _setup(module, extras)
    Qt.__binding_version__ = module.__version__

    if hasattr(Qt, "_shiboken2"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer
        Qt.QtCompat.delete = shiboken2.delete

    if hasattr(Qt, "_QtUiTools"):
        Qt.QtCompat.loadUi = _loadUi

    if hasattr(Qt, "_QtCore"):
        Qt.__qt_version__ = Qt._QtCore.qVersion()
        Qt.QtCompat.dataChanged = (
            lambda self, topleft, bottomright, roles=None:
            self.dataChanged.emit(topleft, bottomright, roles or [])
        )

    if hasattr(Qt, "_QtWidgets"):
        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtWidgets.QHeaderView.setSectionResizeMode

    _reassign_misplaced_members("PySide2")
    _build_compatibility_members("PySide2")


def _pyside():
    """Initialise PySide"""

    import PySide as module
    extras = ["QtUiTools"]
    try:
        try:
            # Before merge of PySide and shiboken
            import shiboken
        except ImportError:
            # After merge of PySide and shiboken, May 2017
            from PySide import shiboken
        extras.append("shiboken")
    except ImportError:
        pass

    _setup(module, extras)
    Qt.__binding_version__ = module.__version__

    if hasattr(Qt, "_shiboken"):
        Qt.QtCompat.wrapInstance =
_wrapinstance + Qt.QtCompat.getCppPointer = _getcpppointer + Qt.QtCompat.delete = shiboken.delete + + if hasattr(Qt, "_QtUiTools"): + Qt.QtCompat.loadUi = _loadUi + + if hasattr(Qt, "_QtGui"): + setattr(Qt, "QtWidgets", _new_module("QtWidgets")) + setattr(Qt, "_QtWidgets", Qt._QtGui) + if hasattr(Qt._QtGui, "QX11Info"): + setattr(Qt, "QtX11Extras", _new_module("QtX11Extras")) + Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info + + Qt.QtCompat.setSectionResizeMode = Qt._QtGui.QHeaderView.setResizeMode + + if hasattr(Qt, "_QtCore"): + Qt.__qt_version__ = Qt._QtCore.qVersion() + Qt.QtCompat.dataChanged = ( + lambda self, topleft, bottomright, roles=None: + self.dataChanged.emit(topleft, bottomright) + ) + + _reassign_misplaced_members("PySide") + _build_compatibility_members("PySide") + + +def _pyqt5(): + """Initialise PyQt5""" + + import PyQt5 as module + extras = ["uic"] + + try: + import sip + extras += ["sip"] + except ImportError: + + # Relevant to PyQt5 5.11 and above + try: + from PyQt5 import sip + extras += ["sip"] + except ImportError: + sip = None + + _setup(module, extras) + if hasattr(Qt, "_sip"): + Qt.QtCompat.wrapInstance = _wrapinstance + Qt.QtCompat.getCppPointer = _getcpppointer + Qt.QtCompat.delete = sip.delete + + if hasattr(Qt, "_uic"): + Qt.QtCompat.loadUi = _loadUi + + if hasattr(Qt, "_QtCore"): + Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR + Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR + Qt.QtCompat.dataChanged = ( + lambda self, topleft, bottomright, roles=None: + self.dataChanged.emit(topleft, bottomright, roles or []) + ) + + if hasattr(Qt, "_QtWidgets"): + Qt.QtCompat.setSectionResizeMode = \ + Qt._QtWidgets.QHeaderView.setSectionResizeMode + + _reassign_misplaced_members("PyQt5") + _build_compatibility_members('PyQt5') + + +def _pyqt4(): + """Initialise PyQt4""" + + import sip + + # Validation of envivornment variable. Prevents an error if + # the variable is invalid since it's just a hint. 
+ try: + hint = int(QT_SIP_API_HINT) + except TypeError: + hint = None # Variable was None, i.e. not set. + except ValueError: + raise ImportError("QT_SIP_API_HINT=%s must be a 1 or 2") + + for api in ("QString", + "QVariant", + "QDate", + "QDateTime", + "QTextStream", + "QTime", + "QUrl"): + try: + sip.setapi(api, hint or 2) + except AttributeError: + raise ImportError("PyQt4 < 4.6 isn't supported by Qt.py") + except ValueError: + actual = sip.getapi(api) + if not hint: + raise ImportError("API version already set to %d" % actual) + else: + # Having provided a hint indicates a soft constraint, one + # that doesn't throw an exception. + sys.stderr.write( + "Warning: API '%s' has already been set to %d.\n" + % (api, actual) + ) + + import PyQt4 as module + extras = ["uic"] + try: + import sip + extras.append(sip.__name__) + except ImportError: + sip = None + + _setup(module, extras) + if hasattr(Qt, "_sip"): + Qt.QtCompat.wrapInstance = _wrapinstance + Qt.QtCompat.getCppPointer = _getcpppointer + Qt.QtCompat.delete = sip.delete + + if hasattr(Qt, "_uic"): + Qt.QtCompat.loadUi = _loadUi + + if hasattr(Qt, "_QtGui"): + setattr(Qt, "QtWidgets", _new_module("QtWidgets")) + setattr(Qt, "_QtWidgets", Qt._QtGui) + if hasattr(Qt._QtGui, "QX11Info"): + setattr(Qt, "QtX11Extras", _new_module("QtX11Extras")) + Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info + + Qt.QtCompat.setSectionResizeMode = \ + Qt._QtGui.QHeaderView.setResizeMode + + if hasattr(Qt, "_QtCore"): + Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR + Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR + Qt.QtCompat.dataChanged = ( + lambda self, topleft, bottomright, roles=None: + self.dataChanged.emit(topleft, bottomright) + ) + + _reassign_misplaced_members("PyQt4") + + # QFileDialog QtCompat decorator + def _standardizeQFileDialog(some_function): + """Decorator that makes PyQt4 return conform to other bindings""" + def wrapper(*args, **kwargs): + ret = (some_function(*args, **kwargs)) + + # PyQt4 only returns 
the selected filename, force it to a + # standard return of the selected filename, and a empty string + # for the selected filter + return ret, '' + + wrapper.__doc__ = some_function.__doc__ + wrapper.__name__ = some_function.__name__ + + return wrapper + + decorators = { + "QFileDialog": { + "getOpenFileName": _standardizeQFileDialog, + "getOpenFileNames": _standardizeQFileDialog, + "getSaveFileName": _standardizeQFileDialog, + } + } + _build_compatibility_members('PyQt4', decorators) + + +def _none(): + """Internal option (used in installer)""" + + Mock = type("Mock", (), {"__getattr__": lambda Qt, attr: None}) + + Qt.__binding__ = "None" + Qt.__qt_version__ = "0.0.0" + Qt.__binding_version__ = "0.0.0" + Qt.QtCompat.loadUi = lambda uifile, baseinstance=None: None + Qt.QtCompat.setSectionResizeMode = lambda *args, **kwargs: None + + for submodule in _common_members.keys(): + setattr(Qt, submodule, Mock()) + setattr(Qt, "_" + submodule, Mock()) + + +def _log(text): + if QT_VERBOSE: + sys.stdout.write(text + "\n") + + +def _convert(lines): + """Convert compiled .ui file from PySide2 to Qt.py + + Arguments: + lines (list): Each line of of .ui file + + Usage: + >> with open("myui.py") as f: + .. lines = _convert(f.readlines()) + + """ + + def parse(line): + line = line.replace("from PySide2 import", "from Qt import QtCompat,") + line = line.replace("QtWidgets.QApplication.translate", + "QtCompat.translate") + if "QtCore.SIGNAL" in line: + raise NotImplementedError("QtCore.SIGNAL is missing from PyQt5 " + "and so Qt.py does not support it: you " + "should avoid defining signals inside " + "your ui files.") + return line + + parsed = list() + for line in lines: + line = parse(line) + parsed.append(line) + + return parsed + + +def _cli(args): + """Qt.py command-line interface""" + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--convert", + help="Path to compiled Python module, e.g. 
my_ui.py") + parser.add_argument("--compile", + help="Accept raw .ui file and compile with native " + "PySide2 compiler.") + parser.add_argument("--stdout", + help="Write to stdout instead of file", + action="store_true") + parser.add_argument("--stdin", + help="Read from stdin instead of file", + action="store_true") + + args = parser.parse_args(args) + + if args.stdout: + raise NotImplementedError("--stdout") + + if args.stdin: + raise NotImplementedError("--stdin") + + if args.compile: + raise NotImplementedError("--compile") + + if args.convert: + sys.stdout.write("#\n" + "# WARNING: --convert is an ALPHA feature.\n#\n" + "# See https://github.com/mottosso/Qt.py/pull/132\n" + "# for details.\n" + "#\n") + + # + # ------> Read + # + with open(args.convert) as f: + lines = _convert(f.readlines()) + + backup = "%s_backup%s" % os.path.splitext(args.convert) + sys.stdout.write("Creating \"%s\"..\n" % backup) + shutil.copy(args.convert, backup) + + # + # <------ Write + # + with open(args.convert, "w") as f: + f.write("".join(lines)) + + sys.stdout.write("Successfully converted \"%s\"\n" % args.convert) + + +class MissingMember(object): + """ + A placeholder type for a missing Qt object not + included in Qt.py + + Args: + name (str): The name of the missing type + details (str): An optional custom error message + """ + ERR_TMPL = ("{} is not a common object across PySide2 " + "and the other Qt bindings. 
It is not included " + "as a common member in the Qt.py layer") + + def __init__(self, name, details=''): + self.__name = name + self.__err = self.ERR_TMPL.format(name) + + if details: + self.__err = "{}: {}".format(self.__err, details) + + def __repr__(self): + return "<{}: {}>".format(self.__class__.__name__, self.__name) + + def __getattr__(self, name): + raise NotImplementedError(self.__err) + + def __call__(self, *a, **kw): + raise NotImplementedError(self.__err) + + +def _install(): + # Default order (customise order and content via QT_PREFERRED_BINDING) + default_order = ("PySide2", "PyQt5", "PySide", "PyQt4") + preferred_order = list( + b for b in QT_PREFERRED_BINDING.split(os.pathsep) if b + ) + + order = preferred_order or default_order + + available = { + "PySide2": _pyside2, + "PyQt5": _pyqt5, + "PySide": _pyside, + "PyQt4": _pyqt4, + "None": _none + } + + _log("Order: '%s'" % "', '".join(order)) + + # Allow site-level customization of the available modules. + _apply_site_config() + + found_binding = False + for name in order: + _log("Trying %s" % name) + + try: + available[name]() + found_binding = True + break + + except ImportError as e: + _log("ImportError: %s" % e) + + except KeyError: + _log("ImportError: Preferred binding '%s' not found." % name) + + if not found_binding: + # If not binding were found, throw this error + raise ImportError("No Qt binding were found.") + + # Install individual members + for name, members in _common_members.items(): + try: + their_submodule = getattr(Qt, "_%s" % name) + except AttributeError: + continue + + our_submodule = getattr(Qt, name) + + # Enable import * + __all__.append(name) + + # Enable direct import of submodule, + # e.g. import Qt.QtCore + sys.modules[__name__ + "." + name] = our_submodule + + for member in members: + # Accept that a submodule may miss certain members. + try: + their_member = getattr(their_submodule, member) + except AttributeError: + _log("'%s.%s' was missing." 
% (name, member)) + continue + + setattr(our_submodule, member, their_member) + + # Install missing member placeholders + for name, members in _missing_members.items(): + our_submodule = getattr(Qt, name) + + for member in members: + + # If the submodule already has this member installed, + # either by the common members, or the site config, + # then skip installing this one over it. + if hasattr(our_submodule, member): + continue + + placeholder = MissingMember("{}.{}".format(name, member), + details=members[member]) + setattr(our_submodule, member, placeholder) + + # Enable direct import of QtCompat + sys.modules['Qt.QtCompat'] = Qt.QtCompat + + # Backwards compatibility + if hasattr(Qt.QtCompat, 'loadUi'): + Qt.QtCompat.load_ui = Qt.QtCompat.loadUi + + +_install() + +# Setup Binding Enum states +Qt.IsPySide2 = Qt.__binding__ == 'PySide2' +Qt.IsPyQt5 = Qt.__binding__ == 'PyQt5' +Qt.IsPySide = Qt.__binding__ == 'PySide' +Qt.IsPyQt4 = Qt.__binding__ == 'PyQt4' + +"""Augment QtCompat + +QtCompat contains wrappers and added functionality +to the original bindings, such as the CLI interface +and otherwise incompatible members between bindings, +such as `QHeaderView.setSectionResizeMode`. 
+ +""" + +Qt.QtCompat._cli = _cli +Qt.QtCompat._convert = _convert + +# Enable command-line interface +if __name__ == "__main__": + _cli(sys.argv[1:]) + + +# The MIT License (MIT) +# +# Copyright (c) 2016-2017 Marcus Ottosson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# +# In PySide(2), loadUi does not exist, so we implement it +# +# `_UiLoader` is adapted from the qtpy project, which was further influenced +# by qt-helpers which was released under a 3-clause BSD license which in turn +# is based on a solution at: +# +# - https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8 +# +# The License for this code is as follows: +# +# qt-helpers - a common front-end to various Qt modules +# +# Copyright (c) 2015, Chris Beaumont and Thomas Robitaille +# +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the +# distribution. +# * Neither the name of the Glue project nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+# +# Which itself was based on the solution at +# +# https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8 +# +# which was released under the MIT license: +# +# Copyright (c) 2011 Sebastian Wiesner +# Modifications by Charl Botha +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files +# (the "Software"),to deal in the Software without restriction, +# including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/python3.9libs/QtPy-1.9.0.dist-info/AUTHORS.md b/python3.9libs/QtPy-1.9.0.dist-info/AUTHORS.md new file mode 100644 index 0000000..a6b4d15 --- /dev/null +++ b/python3.9libs/QtPy-1.9.0.dist-info/AUTHORS.md @@ -0,0 +1,16 @@ +Maintainer +========== + +Gonzalo Peña-Castellanos ([@goanpeca](http://github.com/goanpeca)) + +Main Authors +============ + +* Colin Duquesnoy ([@ColinDuquesnoy](http://github.com/ColinDuquesnoy)) + +* [The Spyder Development Team](https://github.com/spyder-ide/spyder/graphs/contributors) + +Contributors +============ + +* Thomas Robitaille ([@astrofrog](http://www.github.com/astrofrog)) \ No newline at end of file diff --git a/python3.9libs/QtPy-1.9.0.dist-info/INSTALLER b/python3.9libs/QtPy-1.9.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/python3.9libs/QtPy-1.9.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/python3.9libs/QtPy-1.9.0.dist-info/LICENSE.txt b/python3.9libs/QtPy-1.9.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..ea70d57 --- /dev/null +++ b/python3.9libs/QtPy-1.9.0.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) The Spyder Development Team + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/python3.9libs/QtPy-1.9.0.dist-info/METADATA b/python3.9libs/QtPy-1.9.0.dist-info/METADATA new file mode 100644 index 0000000..673e5b3 --- /dev/null +++ b/python3.9libs/QtPy-1.9.0.dist-info/METADATA @@ -0,0 +1,111 @@ +Metadata-Version: 2.1 +Name: QtPy +Version: 1.9.0 +Summary: Provides an abstraction layer on top of the various Qt bindings (PyQt5, PyQt4 and PySide) and additional custom QWidgets. +Home-page: https://github.com/spyder-ide/qtpy +Author: Colin Duquesnoy, The Spyder Development Team +Author-email: goanpeca@gmail.com +Maintainer: Gonzalo Peña-Castellanos +Maintainer-email: goanpeca@gmail.com +License: MIT +Keywords: qt PyQt4 PyQt5 PySide +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: X11 Applications :: Qt +Classifier: Environment :: Win32 (MS Windows) +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Description-Content-Type: text/markdown + +# QtPy: Abstraction layer for PyQt5/PyQt4/PySide2/PySide + +[![license](https://img.shields.io/pypi/l/qtpy.svg)](./LICENSE) +[![pypi version](https://img.shields.io/pypi/v/qtpy.svg)](https://pypi.org/project/QtPy/) +[![conda version](https://img.shields.io/conda/vn/conda-forge/qtpy.svg)](https://www.anaconda.com/download/) +[![download 
count](https://img.shields.io/conda/dn/conda-forge/qtpy.svg)](https://www.anaconda.com/download/) +[![OpenCollective Backers](https://opencollective.com/spyder/backers/badge.svg?color=blue)](#backers) +[![Join the chat at https://gitter.im/spyder-ide/public](https://badges.gitter.im/spyder-ide/spyder.svg)](https://gitter.im/spyder-ide/public)
+[![PyPI status](https://img.shields.io/pypi/status/qtpy.svg)](https://github.com/spyder-ide/qtpy) +[![Build status](https://ci.appveyor.com/api/projects/status/62y6i02vhn4hefg0/branch/master?svg=true)](https://ci.appveyor.com/project/spyder-ide/qtpy/branch/master) +[![CircleCI](https://circleci.com/gh/spyder-ide/qtpy.svg?style=shield)](https://circleci.com/gh/spyder-ide/qtpy) +[![Coverage Status](https://coveralls.io/repos/github/spyder-ide/qtpy/badge.svg?branch=master)](https://coveralls.io/github/spyder-ide/qtpy?branch=master) + +*Copyright © 2009–2019 The Spyder Development Team* + + +## Description + +**QtPy** is a small abstraction layer that lets you +write applications using a single API call to either PyQt or PySide. + +It provides support for PyQt5, PyQt4, PySide2 and PySide using the Qt5 layout +(where the QtGui module has been split into QtGui and QtWidgets). + +Basically, you can write your code as if you were using PySide2 +but import Qt modules from `qtpy` instead of `PySide2` (or `PyQt5`) + + +### Attribution and acknowledgments + +This project is based on the [pyqode.qt](https://github.com/pyQode/pyqode.qt) +project and the [spyderlib.qt](https://github.com/spyder-ide/spyder/tree/2.3/spyderlib/qt) +module from the [Spyder](https://github.com/spyder-ide/spyder) project, and +also includes contributions adapted from +[qt-helpers](https://github.com/glue-viz/qt-helpers), developed as part of the +[glue](http://glueviz.org) project. + +Unlike `pyqode.qt` this is not a namespace package, so it is not tied +to a particular project or namespace. + + +### License + +This project is released under the MIT license. + + +### Requirements + +You need PyQt5, PyQt4, PySide2 or PySide installed in your system to make use +of QtPy. If several of these packages are found, PyQt5 is used by +default unless you set the `QT_API` environment variable. + +`QT_API` can take the following values: + +* `pyqt5` (to use PyQt5). +* `pyqt` or `pyqt4` (to use PyQt4). 
+* `pyside2` (to use PySide2) +* `pyside` (to use PySide). + + +### Installation + +```bash +pip install qtpy +``` + +or + +```bash +conda install qtpy +``` + + +## Contributing + +Everyone is welcome to contribute! + + +## Sponsors + +Become a sponsor to get your logo on our README on Github. + +[![Sponsors](https://opencollective.com/spyder/sponsors.svg)](https://opencollective.com/spyder#support) + + diff --git a/python3.9libs/QtPy-1.9.0.dist-info/RECORD b/python3.9libs/QtPy-1.9.0.dist-info/RECORD new file mode 100644 index 0000000..ad05ce9 --- /dev/null +++ b/python3.9libs/QtPy-1.9.0.dist-info/RECORD @@ -0,0 +1,151 @@ +QtPy-1.9.0.dist-info/AUTHORS.md,sha256=3Is_5Qw5nv29Uy0EcRdrbz-LwcSaJt_g6keMqIEA4tE,377 +QtPy-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +QtPy-1.9.0.dist-info/LICENSE.txt,sha256=DZr1xWKPB7JHhLPetqlpJekYl006gNXSuhdGdqfkKmE,1112 +QtPy-1.9.0.dist-info/METADATA,sha256=NNbn0wWG0Spx8zmYaOYh5ncOspMA1_CoqichanIGa80,4081 +QtPy-1.9.0.dist-info/RECORD,, +QtPy-1.9.0.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +QtPy-1.9.0.dist-info/top_level.txt,sha256=P_I2N1064Bw78JAT09wjPsZSW63PhTkGce3YwzuqZEM,5 +qtpy/Qt3DAnimation.py,sha256=s9hyQvNR_QkoQzihdQdiO2od01MeDT7MkBB8Zzg44rI,972 +qtpy/Qt3DAnimation.pyc,, +qtpy/Qt3DCore.py,sha256=cNpT0qWKEcb6ryw27KbYRfVD3-Xf0UyWZS0bArrkPyM,952 +qtpy/Qt3DCore.pyc,, +qtpy/Qt3DExtras.py,sha256=Nk6ZIzR2shmQPhrn2M3pT5ARZQx6wLtFK6H8mc2B3l4,960 +qtpy/Qt3DExtras.pyc,, +qtpy/Qt3DInput.py,sha256=FKNVDe3DIIqxIsnezznDo-zTN3uZcByksCASvvd5G6c,956 +qtpy/Qt3DInput.pyc,, +qtpy/Qt3DLogic.py,sha256=jIWoeVH3zE_O5eBQpiAZM17dXlcIl_ltHzjaiOKUB94,956 +qtpy/Qt3DLogic.pyc,, +qtpy/Qt3DRender.py,sha256=DMUc6D2RHPZZWvg6f9VS7UYQFz_k5mu_v8FMRj1ALHw,960 +qtpy/Qt3DRender.pyc,, +qtpy/QtCharts.py,sha256=4jye4sZDJVrQ8dI1SyLx_qycoEIWi63c09cTSsx-c5w,765 +qtpy/QtCharts.pyc,, +qtpy/QtCore.py,sha256=kVwNZNv-9vq5bLNntmFPz8A2XpoWxebFe2ohFi0dfZk,4462 +qtpy/QtCore.pyc,, 
+qtpy/QtDatavisualization.py,sha256=q9ipuxcWIx-wHjhZNBULkAX1rT703n70u3Y1PRNC2u4,805 +qtpy/QtDatavisualization.pyc,, +qtpy/QtDesigner.py,sha256=qOig21NKImNWxe9Ql7tiy3Ld_sNihDCuyT60SdZW0mY,399 +qtpy/QtDesigner.pyc,, +qtpy/QtGui.py,sha256=M0O3SD7dd5TtnyxNxCB9YrE70_e0Y5SaWAT1qSrtPvo,8506 +qtpy/QtGui.pyc,, +qtpy/QtHelp.py,sha256=KgUKz04ynfzqDPljnQM0nxyDAviCu213TXD-aPyNQSk,456 +qtpy/QtHelp.pyc,, +qtpy/QtLocation.py,sha256=Y9kpbvAMFqLjf1GXkEJJ3FQHJlWJw8TdxdlgpIZt7OE,583 +qtpy/QtLocation.pyc,, +qtpy/QtMultimedia.py,sha256=g00BUtDsI4KH1---tKQbER7E4y6T8XV7iJncr3tSaqA,372 +qtpy/QtMultimedia.pyc,, +qtpy/QtMultimediaWidgets.py,sha256=ZLP_Nw8EFdf2ClsbbVAhoGsGe715GppdbQewPRqGKhk,610 +qtpy/QtMultimediaWidgets.pyc,, +qtpy/QtNetwork.py,sha256=BQTUba4jD3QbLp0CZhXDOaJcp0Cb7kg3CKbdFkN4Qus,560 +qtpy/QtNetwork.pyc,, +qtpy/QtOpenGL.py,sha256=_72Pp6BAQVkPd8fa-gpwXzRgUX_EZpNo3xDcEWRVS6o,719 +qtpy/QtOpenGL.pyc,, +qtpy/QtPrintSupport.py,sha256=UOYPgdJ3CqLrTZhW_7AstLgk-3Nba4UrVOUr6lsHESM,909 +qtpy/QtPrintSupport.pyc,, +qtpy/QtQml.py,sha256=AH_Ta3k4mG0rcNRjcl9Q-uVgXyj26Kjbxz0ZrqXIF4Q,568 +qtpy/QtQml.pyc,, +qtpy/QtQuick.py,sha256=78qthG4uM5KJXd_vckQ-9CdatwWQeA7gSzBlIIA0mk4,574 +qtpy/QtQuick.pyc,, +qtpy/QtQuickWidgets.py,sha256=nHtgfqWnvshzaG_QqmRSHJJa-x8-KfCXZjNMCfa5j9Y,595 +qtpy/QtQuickWidgets.pyc,, +qtpy/QtSql.py,sha256=5GEU3nYz-z6g7eTR5hHc_M4G_VVCPb_nfj3s43vbQsk,704 +qtpy/QtSql.pyc,, +qtpy/QtSvg.py,sha256=ldn5I45AaOqk44NOzLyKEmgBU3C2rK7eV3X_k-Ymb28,704 +qtpy/QtSvg.pyc,, +qtpy/QtTest.py,sha256=OA81myugWe07BWvL_ltFb_yqJztSTX-a_ay-KGNn0a4,705 +qtpy/QtTest.pyc,, +qtpy/QtWebChannel.py,sha256=fTXCGZJv8gSeRNXPVTlM4V4l_Ms61l7iPMY6inMgqaQ,589 +qtpy/QtWebChannel.pyc,, +qtpy/QtWebEngineWidgets.py,sha256=VVkxjTnpU0E7hhnBxezxIWBDn5xeTmDQZv8EIPY7MA8,1534 +qtpy/QtWebEngineWidgets.pyc,, +qtpy/QtWebSockets.py,sha256=1W61nD-UCVGwDbd2RHxyCKp0bZ9Mk1ugLZpTSWkQlyo,589 +qtpy/QtWebSockets.pyc,, +qtpy/QtWidgets.py,sha256=KP_pK2Fwp4uvWsb3dDOQDUuYJrghzH6G5wjGhtqIy4o,6339 +qtpy/QtWidgets.pyc,, 
+qtpy/QtXmlPatterns.py,sha256=DlgoOrgqIjyNJxmi3UTmPDTndSeCRA7D5PPsMNKEs2M,709 +qtpy/QtXmlPatterns.pyc,, +qtpy/__init__.py,sha256=R4EzurvaSHt_iJPZRJhkovqil2shYJ4PC8lb1YgySbg,7103 +qtpy/__init__.pyc,, +qtpy/_patch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +qtpy/_patch/__init__.pyc,, +qtpy/_patch/qcombobox.py,sha256=LDByLiq_z2Sw1rFptIUX40ezSpSbVYgg1AL4zE5X1mk,4134 +qtpy/_patch/qcombobox.pyc,, +qtpy/_patch/qheaderview.py,sha256=fxwirOipxjtUZxMElny7ADtKnXoDv04zdyYRz6RgY_s,3344 +qtpy/_patch/qheaderview.pyc,, +qtpy/_version.py,sha256=fSIIGzfD3C6cy39MclaPFLKAqly2IbByH0uqLoZ-EAI,72 +qtpy/_version.pyc,, +qtpy/compat.py,sha256=buj1nfGsVRCtUeoTssy-9kdCWWw3Haqq_V5XE9H-ia8,7732 +qtpy/compat.pyc,, +qtpy/py3compat.py,sha256=5HPZRJKqs4iQTIxmiFwSboiW3UBKpjffOyUvNGXb7jc,6435 +qtpy/py3compat.pyc,, +qtpy/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +qtpy/tests/__init__.pyc,, +qtpy/tests/conftest.py,sha256=pIdgsnYfIyH1_04afKZZxjVPZzGUC_fslyqTDJd69zo,1818 +qtpy/tests/conftest.pyc,, +qtpy/tests/runtests.py,sha256=v-1ODzFrSYYTYn6d3IuDReG_oKWdfPsI1g-AsFIjaEg,712 +qtpy/tests/runtests.pyc,, +qtpy/tests/test_macos_checks.py,sha256=68dFCz5CsM61z4PYcnr5mzmGEJg7T-E4YjbSbgLhpug,2939 +qtpy/tests/test_macos_checks.pyc,, +qtpy/tests/test_main.py,sha256=7lp8c1KvE5xy9MJXk9q8PjQc4ZLIB--ejmVmYdunz5E,2470 +qtpy/tests/test_main.pyc,, +qtpy/tests/test_patch_qcombobox.py,sha256=4At9SshvF8CJxtZ3fQT0eu2b_3bY18kfjS_tB4fM1GM,3181 +qtpy/tests/test_patch_qcombobox.pyc,, +qtpy/tests/test_patch_qheaderview.py,sha256=yd6GwqBwM-xzRayUZhUJpU8fwc2pa9kz1PjsivNJIWA,3652 +qtpy/tests/test_patch_qheaderview.pyc,, +qtpy/tests/test_qdesktopservice_split.py,sha256=lzdHEQf5LDpDdn7Co75OGD38Rgoz5EhIPP5emccJ-bA,1297 +qtpy/tests/test_qdesktopservice_split.pyc,, +qtpy/tests/test_qt3danimation.py,sha256=_UN6keLC_e-Y_1ibC1SMBAOWgf4Jn6mThyPhJHcmfkI,1147 +qtpy/tests/test_qt3danimation.pyc,, +qtpy/tests/test_qt3dcore.py,sha256=5L8rA7TfQrceGZqw4HBdipyWRLgiTKvJMROHMSPdMgI,2035 
+qtpy/tests/test_qt3dcore.pyc,, +qtpy/tests/test_qt3dextras.py,sha256=2Tm6sNuttdwH_5F1Ui0W2uLtKIedUo03aIE1jzzGDcQ,2220 +qtpy/tests/test_qt3dextras.pyc,, +qtpy/tests/test_qt3dinput.py,sha256=h_o513Wnyb9_CvFDrO8jywvkZsYmFLGf611e_mHuNmc,1343 +qtpy/tests/test_qt3dinput.pyc,, +qtpy/tests/test_qt3dlogic.py,sha256=scVoX3LuisBwJK0x2HwaD0uqftJtcR1RnqyAzj__6b8,387 +qtpy/tests/test_qt3dlogic.pyc,, +qtpy/tests/test_qt3drender.py,sha256=0J8s6iLgtRczueNc6C0MSIgKnLCWeOkRQOrd6Xm1hs8,5687 +qtpy/tests/test_qt3drender.pyc,, +qtpy/tests/test_qtcharts.py,sha256=aS9TeF8QCU8d3gCfy3kMLAbZHDIuMEh0ostrtLjzbfA,303 +qtpy/tests/test_qtcharts.pyc,, +qtpy/tests/test_qtcore.py,sha256=ot7KLQBPN-tNrST9z9fZVFfTtSwZxFHFfNqTUHcBB9Q,445 +qtpy/tests/test_qtcore.pyc,, +qtpy/tests/test_qtdatavisualization.py,sha256=xxEqVm74YXhpj0tFH7ZuVWUeDp54bcwTXPaL9dsREu4,2420 +qtpy/tests/test_qtdatavisualization.pyc,, +qtpy/tests/test_qtdesigner.py,sha256=-FEpbYFkrQ6Yfd70bDZ5nHM-SBrOmgLvuuNhOgF8PME,1473 +qtpy/tests/test_qtdesigner.pyc,, +qtpy/tests/test_qthelp.py,sha256=dGBPMj_2WcSDj9zb-w_yCVjf9Pi0C_k715DWtbeujJU,707 +qtpy/tests/test_qthelp.pyc,, +qtpy/tests/test_qtlocation.py,sha256=CiFR7BYociebfJuphgFPiGfpOW3Lxm1ODeOmgtTsWRI,2262 +qtpy/tests/test_qtlocation.pyc,, +qtpy/tests/test_qtmultimedia.py,sha256=FimckmR0fhBsC4Xuqn_1W131VA_VKFm-fW14dOLRTrc,580 +qtpy/tests/test_qtmultimedia.pyc,, +qtpy/tests/test_qtmultimediawidgets.py,sha256=AWNUqyAEwCn-DaTAKVFy9HffvBkTneYSv_4HN4quNMg,716 +qtpy/tests/test_qtmultimediawidgets.pyc,, +qtpy/tests/test_qtnetwork.py,sha256=HkV3YwyCOwPPO-ouxol_EmK3WDTrf76GlaWDNRJFzdY,1860 +qtpy/tests/test_qtnetwork.pyc,, +qtpy/tests/test_qtprintsupport.py,sha256=nuUP1JnH6vefpVbJJ05i6mvBtanV5-6OH_XH4SXuHG8,597 +qtpy/tests/test_qtprintsupport.pyc,, +qtpy/tests/test_qtqml.py,sha256=EOs75uom0iBZlDvupccrAzuQfrSWVSn8taK3RIhKo0I,1413 +qtpy/tests/test_qtqml.pyc,, +qtpy/tests/test_qtquick.py,sha256=353Tc6FgzeEf_1GFXM86jG_R5DmMrf0r-XVzpsKEbt4,2250 +qtpy/tests/test_qtquick.pyc,, 
+qtpy/tests/test_qtquickwidgets.py,sha256=Ifl-Xby-ibNwjpDrrZbxJlkmCAkv6J21vEpwZmG_jy0,335 +qtpy/tests/test_qtquickwidgets.pyc,, +qtpy/tests/test_qtsql.py,sha256=wh_R0dR9YGzvGEvW-OLtFg4lyJQdexkvNd_pDowMgRg,843 +qtpy/tests/test_qtsql.pyc,, +qtpy/tests/test_qtsvg.py,sha256=6VbbV77zzsvVxuHwPr4GbnrYzN-bYlmJuzkz4KvWaxw,313 +qtpy/tests/test_qtsvg.pyc,, +qtpy/tests/test_qttest.py,sha256=8aj6q_aeFqacpbM_AesUoU1EEIG-GAceJsgNLqEh9kE,176 +qtpy/tests/test_qttest.pyc,, +qtpy/tests/test_qtwebchannel.py,sha256=mqgNgwU0zK_HTXkA18bXY9b4EbREWXGBSqJwtfr246A,393 +qtpy/tests/test_qtwebchannel.pyc,, +qtpy/tests/test_qtwebenginewidgets.py,sha256=Opvf1vXPAph1yQsywCWW6pO2p_FSVetXZqNsJF0Zd0o,346 +qtpy/tests/test_qtwebenginewidgets.pyc,, +qtpy/tests/test_qtwebsockets.py,sha256=kEWnrotGfrraJS11wfWWUhzWRprOt38PXSLGJCyxUpw,549 +qtpy/tests/test_qtwebsockets.pyc,, +qtpy/tests/test_qtxmlpatterns.py,sha256=kQP_nI7UVKvvqFGndTcHVbBvZ1taLjx5IpbJpJy1gMM,1088 +qtpy/tests/test_qtxmlpatterns.pyc,, +qtpy/tests/test_uic.py,sha256=WkVfT6PhInQh5-lBGh89ACa2hzURchZO1b-iJAJdgb0,2495 +qtpy/tests/test_uic.pyc,, +qtpy/uic.py,sha256=eqpJRZSskJKoYF9yK0VKiNEPuOqgv83hn3NKHj8v9kk,9454 +qtpy/uic.pyc,, diff --git a/python3.9libs/QtPy-1.9.0.dist-info/WHEEL b/python3.9libs/QtPy-1.9.0.dist-info/WHEEL new file mode 100644 index 0000000..78e6f69 --- /dev/null +++ b/python3.9libs/QtPy-1.9.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/python3.9libs/QtPy-1.9.0.dist-info/top_level.txt b/python3.9libs/QtPy-1.9.0.dist-info/top_level.txt new file mode 100644 index 0000000..086fa2e --- /dev/null +++ b/python3.9libs/QtPy-1.9.0.dist-info/top_level.txt @@ -0,0 +1 @@ +qtpy diff --git a/python3.9libs/__init__.py b/python3.9libs/__init__.py new file mode 100644 index 0000000..a207b60 --- /dev/null +++ b/python3.9libs/__init__.py @@ -0,0 +1,2 @@ +from playhouse import sqlite_ext +from peewee import * \ No newline at end of file 
diff --git a/python3.9libs/peewee-3.14.8.dist-info/INSTALLER b/python3.9libs/peewee-3.14.8.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/python3.9libs/peewee-3.14.8.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/python3.9libs/peewee-3.14.8.dist-info/LICENSE b/python3.9libs/peewee-3.14.8.dist-info/LICENSE new file mode 100644 index 0000000..c752ab3 --- /dev/null +++ b/python3.9libs/peewee-3.14.8.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010 Charles Leifer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/python3.9libs/peewee-3.14.8.dist-info/METADATA b/python3.9libs/peewee-3.14.8.dist-info/METADATA new file mode 100644 index 0000000..ee3562d --- /dev/null +++ b/python3.9libs/peewee-3.14.8.dist-info/METADATA @@ -0,0 +1,161 @@ +Metadata-Version: 2.1 +Name: peewee +Version: 3.14.8 +Summary: a little orm +Home-page: https://github.com/coleifer/peewee/ +Author: Charles Leifer +Author-email: coleifer@gmail.com +License: MIT License +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +License-File: LICENSE + +.. image:: https://media.charlesleifer.com/blog/photos/peewee3-logo.png + +peewee +====== + +Peewee is a simple and small ORM. It has few (but expressive) concepts, making it easy to learn and intuitive to use. + +* a small, expressive ORM +* python 2.7+ and 3.4+ (developed with 3.6) +* supports sqlite, mysql, postgresql and cockroachdb +* tons of `extensions `_ + +.. image:: https://travis-ci.org/coleifer/peewee.svg?branch=master + :target: https://travis-ci.org/coleifer/peewee + +New to peewee? These may help: + +* `Quickstart `_ +* `Example twitter app `_ +* `Using peewee interactively `_ +* `Models and fields `_ +* `Querying `_ +* `Relationships and joins `_ + +Examples +-------- + +Defining models is similar to Django or SQLAlchemy: + +.. 
code-block:: python + + from peewee import * + import datetime + + + db = SqliteDatabase('my_database.db') + + class BaseModel(Model): + class Meta: + database = db + + class User(BaseModel): + username = CharField(unique=True) + + class Tweet(BaseModel): + user = ForeignKeyField(User, backref='tweets') + message = TextField() + created_date = DateTimeField(default=datetime.datetime.now) + is_published = BooleanField(default=True) + +Connect to the database and create tables: + +.. code-block:: python + + db.connect() + db.create_tables([User, Tweet]) + +Create a few rows: + +.. code-block:: python + + charlie = User.create(username='charlie') + huey = User(username='huey') + huey.save() + + # No need to set `is_published` or `created_date` since they + # will just use the default values we specified. + Tweet.create(user=charlie, message='My first tweet') + +Queries are expressive and composable: + +.. code-block:: python + + # A simple query selecting a user. + User.get(User.username == 'charlie') + + # Get tweets created by one of several users. + usernames = ['charlie', 'huey', 'mickey'] + users = User.select().where(User.username.in_(usernames)) + tweets = Tweet.select().where(Tweet.user.in_(users)) + + # We could accomplish the same using a JOIN: + tweets = (Tweet + .select() + .join(User) + .where(User.username.in_(usernames))) + + # How many tweets were published today? + tweets_today = (Tweet + .select() + .where( + (Tweet.created_date >= datetime.date.today()) & + (Tweet.is_published == True)) + .count()) + + # Paginate the user table and show me page 3 (users 41-60). 
+ User.select().order_by(User.username).paginate(3, 20) + + # Order users by the number of tweets they've created: + tweet_ct = fn.Count(Tweet.id) + users = (User + .select(User, tweet_ct.alias('ct')) + .join(Tweet, JOIN.LEFT_OUTER) + .group_by(User) + .order_by(tweet_ct.desc())) + + # Do an atomic update + Counter.update(count=Counter.count + 1).where(Counter.url == request.url) + +Check out the `example twitter app `_. + +Learning more +------------- + +Check the `documentation `_ for more examples. + +Specific question? Come hang out in the #peewee channel on irc.libera.chat, or post to the mailing list, http://groups.google.com/group/peewee-orm . If you would like to report a bug, `create a new issue `_ on GitHub. + +Still want more info? +--------------------- + +.. image:: https://media.charlesleifer.com/blog/photos/wat.jpg + +I've written a number of blog posts about building applications and web-services with peewee (and usually Flask). If you'd like to see some real-life applications that use peewee, the following resources may be useful: + +* `Building a note-taking app with Flask and Peewee `_ as well as `Part 2 `_ and `Part 3 `_. +* `Analytics web service built with Flask and Peewee `_. +* `Personalized news digest (with a boolean query parser!) `_. +* `Structuring Flask apps with Peewee `_. +* `Creating a lastpass clone with Flask and Peewee `_. +* `Creating a bookmarking web-service that takes screenshots of your bookmarks `_. +* `Building a pastebin, wiki and a bookmarking service using Flask and Peewee `_. +* `Encrypted databases with Python and SQLCipher `_. +* `Dear Diary: An Encrypted, Command-Line Diary with Peewee `_. +* `Query Tree Structures in SQLite using Peewee and the Transitive Closure Extension `_. 
+ + diff --git a/python3.9libs/peewee-3.14.8.dist-info/RECORD b/python3.9libs/peewee-3.14.8.dist-info/RECORD new file mode 100644 index 0000000..4cf0a21 --- /dev/null +++ b/python3.9libs/peewee-3.14.8.dist-info/RECORD @@ -0,0 +1,61 @@ +../../../bin/__pycache__/pwiz.cpython-37.pyc,, +../../../bin/pwiz.py,sha256=qKjuSA5tKGefAGJZQM-Tcy43FhGqtD6dBwRG_yjbARo,8180 +__pycache__/peewee.cpython-37.pyc,, +__pycache__/pwiz.cpython-37.pyc,, +peewee-3.14.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +peewee-3.14.8.dist-info/LICENSE,sha256=N0AJYSWwhzWiR7jdCM2C4LqYTTvr2SIdN4V2Y35SQNo,1058 +peewee-3.14.8.dist-info/METADATA,sha256=5BP0M4qMT7nVWDrPh8EgXthHoE0vNzTLAybFZrkZNyw,6876 +peewee-3.14.8.dist-info/RECORD,, +peewee-3.14.8.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +peewee-3.14.8.dist-info/WHEEL,sha256=8MJuIn_o44q10qi23aeeO3JiijJAiu3iAJYjMuztSYU,104 +peewee-3.14.8.dist-info/top_level.txt,sha256=uV7RZ61bWm9zDrPVGNrGay4E4WDonEqtU2NPe5GGUWs,22 +peewee.py,sha256=MLxbxs2uKgXPjzPk8XZVJlP7t5sjfytqQuMQsT_9d3w,267565 +playhouse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +playhouse/__pycache__/__init__.cpython-37.pyc,, +playhouse/__pycache__/apsw_ext.cpython-37.pyc,, +playhouse/__pycache__/cockroachdb.cpython-37.pyc,, +playhouse/__pycache__/dataset.cpython-37.pyc,, +playhouse/__pycache__/db_url.cpython-37.pyc,, +playhouse/__pycache__/fields.cpython-37.pyc,, +playhouse/__pycache__/flask_utils.cpython-37.pyc,, +playhouse/__pycache__/hybrid.cpython-37.pyc,, +playhouse/__pycache__/kv.cpython-37.pyc,, +playhouse/__pycache__/migrate.cpython-37.pyc,, +playhouse/__pycache__/mysql_ext.cpython-37.pyc,, +playhouse/__pycache__/pool.cpython-37.pyc,, +playhouse/__pycache__/postgres_ext.cpython-37.pyc,, +playhouse/__pycache__/psycopg3_ext.cpython-37.pyc,, +playhouse/__pycache__/reflection.cpython-37.pyc,, +playhouse/__pycache__/shortcuts.cpython-37.pyc,, +playhouse/__pycache__/signals.cpython-37.pyc,, 
+playhouse/__pycache__/sqlcipher_ext.cpython-37.pyc,, +playhouse/__pycache__/sqlite_changelog.cpython-37.pyc,, +playhouse/__pycache__/sqlite_ext.cpython-37.pyc,, +playhouse/__pycache__/sqlite_udf.cpython-37.pyc,, +playhouse/__pycache__/sqliteq.cpython-37.pyc,, +playhouse/__pycache__/test_utils.cpython-37.pyc,, +playhouse/_sqlite_ext.cpython-37m-x86_64-linux-gnu.so,sha256=3pQW7MCcDIhBUnujrRYb-iAplhHdfgZLvdikvh1FI9c,1532752 +playhouse/_sqlite_udf.cpython-37m-x86_64-linux-gnu.so,sha256=L1aMacu0-abwgWN-iOY8QGjEceoMtg7G9crQYSpC8WY,658472 +playhouse/apsw_ext.py,sha256=wgbh2A2nL3lQ1xSzJXxMKvdCpWLNnQVFOfP4yRMe874,4581 +playhouse/cockroachdb.py,sha256=iaOZER6RDpVhcRvAKeHVYWI94HuJUeLgvqBM2SW4qCg,9101 +playhouse/dataset.py,sha256=R4dpY-zrVTDFiXT2Xj2KBdzlMfI0FxmTder_IQ3TT2I,14230 +playhouse/db_url.py,sha256=JFhMZN268SbumQeQWpYZwCiKxZeXI76cbwXCCi9SAZw,4246 +playhouse/fields.py,sha256=dyO8d3l-3uq_XP7gmQRVdL6k70oUfAiXGhCXjAbayeY,1699 +playhouse/flask_utils.py,sha256=N0stRAzFJXUKu7gWtE46zUeAInReHjJ-F2N5lJGPAM8,6034 +playhouse/hybrid.py,sha256=rRAPBImP2x61DoYh53mLm4JMPoNCLPFTd7_WIrq6_gU,1528 +playhouse/kv.py,sha256=S7AzM1v-G-BORNU1k_sJQm6MJettDZVBSWcRnqPoAoE,5375 +playhouse/migrate.py,sha256=f4XRdeXL2oU1XuwSU6HQfBI-op1BfddBOTdbeNRFKUs,30832 +playhouse/mysql_ext.py,sha256=jwkxzUk87xrYbNyQ5DWyPMuDDFtVCZkjSlxA11y0b34,2854 +playhouse/pool.py,sha256=nrp-zLRmzDQsbIVvT8r4GI6NwIP53Are2Sj8jm0uC3c,11476 +playhouse/postgres_ext.py,sha256=RXlNnVDIjk2SXkjYexjEnVBH42bNv8-hjnlG-6nLKck,14499 +playhouse/psycopg3_ext.py,sha256=ryO6uQI-ivgrOGc58gkHZsNEdMYunBarfs-4XLtXq2w,1142 +playhouse/reflection.py,sha256=IoYNNdkBW2q5tB8dJchc_E-NUDsWMwg69-WJ93WNXvE,30106 +playhouse/shortcuts.py,sha256=sLa460dR0l43ZPNTL02VXIvaAWxzKgTZJuVarLURE2Y,10016 +playhouse/signals.py,sha256=9jFNDCpbMmBOfwCNIKlB3PlL7Hc1j45Rn9RObELZc6M,2523 +playhouse/sqlcipher_ext.py,sha256=ZO8zN6pM4_gA-5ML3P7cnlCop5aLU2w9JLFTktNznkU,3632 +playhouse/sqlite_changelog.py,sha256=JrTcwlZzcFzoTdorr2Xtt_oAkv6MyLU7sRdTFADG950,4567 
+playhouse/sqlite_ext.py,sha256=gikZsItAparybH9QxZDoTeiS_K4JyWcsjhXC-pXDCuY,45096 +playhouse/sqlite_udf.py,sha256=wl356xkDKRq6rNPZOF2SPKm8UY4W1u7o1xGliqPeUQk,13665 +playhouse/sqliteq.py,sha256=m6GwB85UfKOyfpaosMv76S_2HDj6kB8Oeiea7T6DHI4,10043 +playhouse/test_utils.py,sha256=sJwKeBq2ebR_VFVBGjHDmfzlAqR04GPXKnD3dfA-GAA,1737 +pwiz.py,sha256=7ctwTZ44cPsCCKTtd-8pPVMqNon9gTVy4K_Ue3gRLTQ,8193 diff --git a/python3.9libs/peewee-3.14.8.dist-info/REQUESTED b/python3.9libs/peewee-3.14.8.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/python3.9libs/peewee-3.14.8.dist-info/WHEEL b/python3.9libs/peewee-3.14.8.dist-info/WHEEL new file mode 100644 index 0000000..e264365 --- /dev/null +++ b/python3.9libs/peewee-3.14.8.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: false +Tag: cp37-cp37m-linux_x86_64 + diff --git a/python3.9libs/peewee-3.14.8.dist-info/top_level.txt b/python3.9libs/peewee-3.14.8.dist-info/top_level.txt new file mode 100644 index 0000000..1d507be --- /dev/null +++ b/python3.9libs/peewee-3.14.8.dist-info/top_level.txt @@ -0,0 +1,3 @@ +peewee +playhouse +pwiz diff --git a/python3.9libs/peewee.py b/python3.9libs/peewee.py new file mode 100644 index 0000000..4989399 --- /dev/null +++ b/python3.9libs/peewee.py @@ -0,0 +1,7814 @@ +from bisect import bisect_left +from bisect import bisect_right +from contextlib import contextmanager +from copy import deepcopy +from functools import wraps +from inspect import isclass +import calendar +import collections +import datetime +import decimal +import hashlib +import itertools +import logging +import operator +import re +import socket +import struct +import sys +import threading +import time +import uuid +import warnings +try: + from collections.abc import Mapping +except ImportError: + from collections import Mapping + +try: + from pysqlite3 import dbapi2 as pysq3 +except ImportError: + try: + from pysqlite2 import dbapi2 as pysq3 + except 
ImportError: + pysq3 = None +try: + import sqlite3 +except ImportError: + sqlite3 = pysq3 +else: + if pysq3 and pysq3.sqlite_version_info >= sqlite3.sqlite_version_info: + sqlite3 = pysq3 +try: + from psycopg2cffi import compat + compat.register() +except ImportError: + pass +try: + import psycopg2 + from psycopg2 import extensions as pg_extensions + try: + from psycopg2 import errors as pg_errors + except ImportError: + pg_errors = None +except ImportError: + psycopg2 = pg_errors = None +try: + from psycopg2.extras import register_uuid as pg_register_uuid + pg_register_uuid() +except Exception: + pass + +mysql_passwd = False +try: + import pymysql as mysql +except ImportError: + try: + import MySQLdb as mysql + mysql_passwd = True + except ImportError: + mysql = None + + +__version__ = '3.14.8' +__all__ = [ + 'AnyField', + 'AsIs', + 'AutoField', + 'BareField', + 'BigAutoField', + 'BigBitField', + 'BigIntegerField', + 'BinaryUUIDField', + 'BitField', + 'BlobField', + 'BooleanField', + 'Case', + 'Cast', + 'CharField', + 'Check', + 'chunked', + 'Column', + 'CompositeKey', + 'Context', + 'Database', + 'DatabaseError', + 'DatabaseProxy', + 'DataError', + 'DateField', + 'DateTimeField', + 'DecimalField', + 'DeferredForeignKey', + 'DeferredThroughModel', + 'DJANGO_MAP', + 'DoesNotExist', + 'DoubleField', + 'DQ', + 'EXCLUDED', + 'Field', + 'FixedCharField', + 'FloatField', + 'fn', + 'ForeignKeyField', + 'IdentityField', + 'ImproperlyConfigured', + 'Index', + 'IntegerField', + 'IntegrityError', + 'InterfaceError', + 'InternalError', + 'IPField', + 'JOIN', + 'ManyToManyField', + 'Model', + 'ModelIndex', + 'MySQLDatabase', + 'NotSupportedError', + 'OP', + 'OperationalError', + 'PostgresqlDatabase', + 'PrimaryKeyField', # XXX: Deprecated, change to AutoField. 
+ 'prefetch', + 'ProgrammingError', + 'Proxy', + 'QualifiedNames', + 'SchemaManager', + 'SmallIntegerField', + 'Select', + 'SQL', + 'SqliteDatabase', + 'Table', + 'TextField', + 'TimeField', + 'TimestampField', + 'Tuple', + 'UUIDField', + 'Value', + 'ValuesList', + 'Window', +] + +try: # Python 2.7+ + from logging import NullHandler +except ImportError: + class NullHandler(logging.Handler): + def emit(self, record): + pass + +logger = logging.getLogger('peewee') +logger.addHandler(NullHandler()) + + +if sys.version_info[0] == 2: + text_type = unicode + bytes_type = str + buffer_type = buffer + izip_longest = itertools.izip_longest + callable_ = callable + multi_types = (list, tuple, frozenset, set) + exec('def reraise(tp, value, tb=None): raise tp, value, tb') + def print_(s): + sys.stdout.write(s) + sys.stdout.write('\n') +else: + import builtins + try: + from collections.abc import Callable + except ImportError: + from collections import Callable + from functools import reduce + callable_ = lambda c: isinstance(c, Callable) + text_type = str + bytes_type = bytes + buffer_type = memoryview + basestring = str + long = int + multi_types = (list, tuple, frozenset, set, range) + print_ = getattr(builtins, 'print') + izip_longest = itertools.zip_longest + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + +if sqlite3: + sqlite3.register_adapter(decimal.Decimal, str) + sqlite3.register_adapter(datetime.date, str) + sqlite3.register_adapter(datetime.time, str) + __sqlite_version__ = sqlite3.sqlite_version_info +else: + __sqlite_version__ = (0, 0, 0) + + +__date_parts__ = set(('year', 'month', 'day', 'hour', 'minute', 'second')) + +# Sqlite does not support the `date_part` SQL function, so we will define an +# implementation in python. 
+__sqlite_datetime_formats__ = ( + '%Y-%m-%d %H:%M:%S', + '%Y-%m-%d %H:%M:%S.%f', + '%Y-%m-%d', + '%H:%M:%S', + '%H:%M:%S.%f', + '%H:%M') + +__sqlite_date_trunc__ = { + 'year': '%Y-01-01 00:00:00', + 'month': '%Y-%m-01 00:00:00', + 'day': '%Y-%m-%d 00:00:00', + 'hour': '%Y-%m-%d %H:00:00', + 'minute': '%Y-%m-%d %H:%M:00', + 'second': '%Y-%m-%d %H:%M:%S'} + +__mysql_date_trunc__ = __sqlite_date_trunc__.copy() +__mysql_date_trunc__['minute'] = '%Y-%m-%d %H:%i:00' +__mysql_date_trunc__['second'] = '%Y-%m-%d %H:%i:%S' + +def _sqlite_date_part(lookup_type, datetime_string): + assert lookup_type in __date_parts__ + if not datetime_string: + return + dt = format_date_time(datetime_string, __sqlite_datetime_formats__) + return getattr(dt, lookup_type) + +def _sqlite_date_trunc(lookup_type, datetime_string): + assert lookup_type in __sqlite_date_trunc__ + if not datetime_string: + return + dt = format_date_time(datetime_string, __sqlite_datetime_formats__) + return dt.strftime(__sqlite_date_trunc__[lookup_type]) + + +def __deprecated__(s): + warnings.warn(s, DeprecationWarning) + + +class attrdict(dict): + def __getattr__(self, attr): + try: + return self[attr] + except KeyError: + raise AttributeError(attr) + def __setattr__(self, attr, value): self[attr] = value + def __iadd__(self, rhs): self.update(rhs); return self + def __add__(self, rhs): d = attrdict(self); d.update(rhs); return d + +SENTINEL = object() + +#: Operations for use in SQL expressions. +OP = attrdict( + AND='AND', + OR='OR', + ADD='+', + SUB='-', + MUL='*', + DIV='/', + BIN_AND='&', + BIN_OR='|', + XOR='#', + MOD='%', + EQ='=', + LT='<', + LTE='<=', + GT='>', + GTE='>=', + NE='!=', + IN='IN', + NOT_IN='NOT IN', + IS='IS', + IS_NOT='IS NOT', + LIKE='LIKE', + ILIKE='ILIKE', + BETWEEN='BETWEEN', + REGEXP='REGEXP', + IREGEXP='IREGEXP', + CONCAT='||', + BITWISE_NEGATION='~') + +# To support "django-style" double-underscore filters, create a mapping between +# operation name and operation code, e.g. 
"__eq" == OP.EQ. +DJANGO_MAP = attrdict({ + 'eq': operator.eq, + 'lt': operator.lt, + 'lte': operator.le, + 'gt': operator.gt, + 'gte': operator.ge, + 'ne': operator.ne, + 'in': operator.lshift, + 'is': lambda l, r: Expression(l, OP.IS, r), + 'like': lambda l, r: Expression(l, OP.LIKE, r), + 'ilike': lambda l, r: Expression(l, OP.ILIKE, r), + 'regexp': lambda l, r: Expression(l, OP.REGEXP, r), +}) + +#: Mapping of field type to the data-type supported by the database. Databases +#: may override or add to this list. +FIELD = attrdict( + AUTO='INTEGER', + BIGAUTO='BIGINT', + BIGINT='BIGINT', + BLOB='BLOB', + BOOL='SMALLINT', + CHAR='CHAR', + DATE='DATE', + DATETIME='DATETIME', + DECIMAL='DECIMAL', + DEFAULT='', + DOUBLE='REAL', + FLOAT='REAL', + INT='INTEGER', + SMALLINT='SMALLINT', + TEXT='TEXT', + TIME='TIME', + UUID='TEXT', + UUIDB='BLOB', + VARCHAR='VARCHAR') + +#: Join helpers (for convenience) -- all join types are supported, this object +#: is just to help avoid introducing errors by using strings everywhere. +JOIN = attrdict( + INNER='INNER JOIN', + LEFT_OUTER='LEFT OUTER JOIN', + RIGHT_OUTER='RIGHT OUTER JOIN', + FULL='FULL JOIN', + FULL_OUTER='FULL OUTER JOIN', + CROSS='CROSS JOIN', + NATURAL='NATURAL JOIN', + LATERAL='LATERAL', + LEFT_LATERAL='LEFT JOIN LATERAL') + +# Row representations. +ROW = attrdict( + TUPLE=1, + DICT=2, + NAMED_TUPLE=3, + CONSTRUCTOR=4, + MODEL=5) + +SCOPE_NORMAL = 1 +SCOPE_SOURCE = 2 +SCOPE_VALUES = 4 +SCOPE_CTE = 8 +SCOPE_COLUMN = 16 + +# Rules for parentheses around subqueries in compound select. +CSQ_PARENTHESES_NEVER = 0 +CSQ_PARENTHESES_ALWAYS = 1 +CSQ_PARENTHESES_UNNESTED = 2 + +# Regular expressions used to convert class names to snake-case table names. +# First regex handles acronym followed by word or initial lower-word followed +# by a capitalized word. e.g. APIResponse -> API_Response / fooBar -> foo_Bar. +# Second regex handles the normal case of two title-cased words. 
+SNAKE_CASE_STEP1 = re.compile('(.)_*([A-Z][a-z]+)') +SNAKE_CASE_STEP2 = re.compile('([a-z0-9])_*([A-Z])') + +# Helper functions that are used in various parts of the codebase. +MODEL_BASE = '_metaclass_helper_' + +def with_metaclass(meta, base=object): + return meta(MODEL_BASE, (base,), {}) + +def merge_dict(source, overrides): + merged = source.copy() + if overrides: + merged.update(overrides) + return merged + +def quote(path, quote_chars): + if len(path) == 1: + return path[0].join(quote_chars) + return '.'.join([part.join(quote_chars) for part in path]) + +is_model = lambda o: isclass(o) and issubclass(o, Model) + +def ensure_tuple(value): + if value is not None: + return value if isinstance(value, (list, tuple)) else (value,) + +def ensure_entity(value): + if value is not None: + return value if isinstance(value, Node) else Entity(value) + +def make_snake_case(s): + first = SNAKE_CASE_STEP1.sub(r'\1_\2', s) + return SNAKE_CASE_STEP2.sub(r'\1_\2', first).lower() + +def chunked(it, n): + marker = object() + for group in (list(g) for g in izip_longest(*[iter(it)] * n, + fillvalue=marker)): + if group[-1] is marker: + del group[group.index(marker):] + yield group + + +class _callable_context_manager(object): + def __call__(self, fn): + @wraps(fn) + def inner(*args, **kwargs): + with self: + return fn(*args, **kwargs) + return inner + + +class Proxy(object): + """ + Create a proxy or placeholder for another object. 
+ """ + __slots__ = ('obj', '_callbacks') + + def __init__(self): + self._callbacks = [] + self.initialize(None) + + def initialize(self, obj): + self.obj = obj + for callback in self._callbacks: + callback(obj) + + def attach_callback(self, callback): + self._callbacks.append(callback) + return callback + + def passthrough(method): + def inner(self, *args, **kwargs): + if self.obj is None: + raise AttributeError('Cannot use uninitialized Proxy.') + return getattr(self.obj, method)(*args, **kwargs) + return inner + + # Allow proxy to be used as a context-manager. + __enter__ = passthrough('__enter__') + __exit__ = passthrough('__exit__') + + def __getattr__(self, attr): + if self.obj is None: + raise AttributeError('Cannot use uninitialized Proxy.') + return getattr(self.obj, attr) + + def __setattr__(self, attr, value): + if attr not in self.__slots__: + raise AttributeError('Cannot set attribute on proxy.') + return super(Proxy, self).__setattr__(attr, value) + + +class DatabaseProxy(Proxy): + """ + Proxy implementation specifically for proxying `Database` objects. + """ + def connection_context(self): + return ConnectionContext(self) + def atomic(self, *args, **kwargs): + return _atomic(self, *args, **kwargs) + def manual_commit(self): + return _manual(self) + def transaction(self, *args, **kwargs): + return _transaction(self, *args, **kwargs) + def savepoint(self): + return _savepoint(self) + + +class ModelDescriptor(object): pass + + +# SQL Generation. + + +class AliasManager(object): + __slots__ = ('_counter', '_current_index', '_mapping') + + def __init__(self): + # A list of dictionaries containing mappings at various depths. 
+ self._counter = 0 + self._current_index = 0 + self._mapping = [] + self.push() + + @property + def mapping(self): + return self._mapping[self._current_index - 1] + + def add(self, source): + if source not in self.mapping: + self._counter += 1 + self[source] = 't%d' % self._counter + return self.mapping[source] + + def get(self, source, any_depth=False): + if any_depth: + for idx in reversed(range(self._current_index)): + if source in self._mapping[idx]: + return self._mapping[idx][source] + return self.add(source) + + def __getitem__(self, source): + return self.get(source) + + def __setitem__(self, source, alias): + self.mapping[source] = alias + + def push(self): + self._current_index += 1 + if self._current_index > len(self._mapping): + self._mapping.append({}) + + def pop(self): + if self._current_index == 1: + raise ValueError('Cannot pop() from empty alias manager.') + self._current_index -= 1 + + +class State(collections.namedtuple('_State', ('scope', 'parentheses', + 'settings'))): + def __new__(cls, scope=SCOPE_NORMAL, parentheses=False, **kwargs): + return super(State, cls).__new__(cls, scope, parentheses, kwargs) + + def __call__(self, scope=None, parentheses=None, **kwargs): + # Scope and settings are "inherited" (parentheses is not, however). + scope = self.scope if scope is None else scope + + # Try to avoid unnecessary dict copying. + if kwargs and self.settings: + settings = self.settings.copy() # Copy original settings dict. + settings.update(kwargs) # Update copy with overrides. 
+ elif kwargs: + settings = kwargs + else: + settings = self.settings + return State(scope, parentheses, **settings) + + def __getattr__(self, attr_name): + return self.settings.get(attr_name) + + +def __scope_context__(scope): + @contextmanager + def inner(self, **kwargs): + with self(scope=scope, **kwargs): + yield self + return inner + + +class Context(object): + __slots__ = ('stack', '_sql', '_values', 'alias_manager', 'state') + + def __init__(self, **settings): + self.stack = [] + self._sql = [] + self._values = [] + self.alias_manager = AliasManager() + self.state = State(**settings) + + def as_new(self): + return Context(**self.state.settings) + + def column_sort_key(self, item): + return item[0].get_sort_key(self) + + @property + def scope(self): + return self.state.scope + + @property + def parentheses(self): + return self.state.parentheses + + @property + def subquery(self): + return self.state.subquery + + def __call__(self, **overrides): + if overrides and overrides.get('scope') == self.scope: + del overrides['scope'] + + self.stack.append(self.state) + self.state = self.state(**overrides) + return self + + scope_normal = __scope_context__(SCOPE_NORMAL) + scope_source = __scope_context__(SCOPE_SOURCE) + scope_values = __scope_context__(SCOPE_VALUES) + scope_cte = __scope_context__(SCOPE_CTE) + scope_column = __scope_context__(SCOPE_COLUMN) + + def __enter__(self): + if self.parentheses: + self.literal('(') + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.parentheses: + self.literal(')') + self.state = self.stack.pop() + + @contextmanager + def push_alias(self): + self.alias_manager.push() + yield + self.alias_manager.pop() + + def sql(self, obj): + if isinstance(obj, (Node, Context)): + return obj.__sql__(self) + elif is_model(obj): + return obj._meta.table.__sql__(self) + else: + return self.sql(Value(obj)) + + def literal(self, keyword): + self._sql.append(keyword) + return self + + def value(self, value, converter=None, 
add_param=True): + if converter: + value = converter(value) + elif converter is None and self.state.converter: + # Explicitly check for None so that "False" can be used to signify + # that no conversion should be applied. + value = self.state.converter(value) + + if isinstance(value, Node): + with self(converter=None): + return self.sql(value) + elif is_model(value): + # Under certain circumstances, we could end-up treating a model- + # class itself as a value. This check ensures that we drop the + # table alias into the query instead of trying to parameterize a + # model (for instance, passing a model as a function argument). + with self.scope_column(): + return self.sql(value) + + self._values.append(value) + return self.literal(self.state.param or '?') if add_param else self + + def __sql__(self, ctx): + ctx._sql.extend(self._sql) + ctx._values.extend(self._values) + return ctx + + def parse(self, node): + return self.sql(node).query() + + def query(self): + return ''.join(self._sql), self._values + + +def query_to_string(query): + # NOTE: this function is not exported by default as it might be misused -- + # and this misuse could lead to sql injection vulnerabilities. This + # function is intended for debugging or logging purposes ONLY. + db = getattr(query, '_database', None) + if db is not None: + ctx = db.get_sql_context() + else: + ctx = Context() + + sql, params = ctx.sql(query).query() + if not params: + return sql + + param = ctx.state.param or '?' + if param == '?': + sql = sql.replace('?', '%s') + + return sql % tuple(map(_query_val_transform, params)) + +def _query_val_transform(v): + # Interpolate parameters. + if isinstance(v, (text_type, datetime.datetime, datetime.date, + datetime.time)): + v = "'%s'" % v + elif isinstance(v, bytes_type): + try: + v = v.decode('utf8') + except UnicodeDecodeError: + v = v.decode('raw_unicode_escape') + v = "'%s'" % v + elif isinstance(v, int): + v = '%s' % int(v) # Also handles booleans -> 1 or 0. 
+ elif v is None: + v = 'NULL' + else: + v = str(v) + return v + + +# AST. + + +class Node(object): + _coerce = True + + def clone(self): + obj = self.__class__.__new__(self.__class__) + obj.__dict__ = self.__dict__.copy() + return obj + + def __sql__(self, ctx): + raise NotImplementedError + + @staticmethod + def copy(method): + def inner(self, *args, **kwargs): + clone = self.clone() + method(clone, *args, **kwargs) + return clone + return inner + + def coerce(self, _coerce=True): + if _coerce != self._coerce: + clone = self.clone() + clone._coerce = _coerce + return clone + return self + + def is_alias(self): + return False + + def unwrap(self): + return self + + +class ColumnFactory(object): + __slots__ = ('node',) + + def __init__(self, node): + self.node = node + + def __getattr__(self, attr): + return Column(self.node, attr) + + +class _DynamicColumn(object): + __slots__ = () + + def __get__(self, instance, instance_type=None): + if instance is not None: + return ColumnFactory(instance) # Implements __getattr__(). + return self + + +class _ExplicitColumn(object): + __slots__ = () + + def __get__(self, instance, instance_type=None): + if instance is not None: + raise AttributeError( + '%s specifies columns explicitly, and does not support ' + 'dynamic column lookups.' 
% instance) + return self + + +class Source(Node): + c = _DynamicColumn() + + def __init__(self, alias=None): + super(Source, self).__init__() + self._alias = alias + + @Node.copy + def alias(self, name): + self._alias = name + + def select(self, *columns): + if not columns: + columns = (SQL('*'),) + return Select((self,), columns) + + def join(self, dest, join_type=JOIN.INNER, on=None): + return Join(self, dest, join_type, on) + + def left_outer_join(self, dest, on=None): + return Join(self, dest, JOIN.LEFT_OUTER, on) + + def cte(self, name, recursive=False, columns=None, materialized=None): + return CTE(name, self, recursive=recursive, columns=columns, + materialized=materialized) + + def get_sort_key(self, ctx): + if self._alias: + return (self._alias,) + return (ctx.alias_manager[self],) + + def apply_alias(self, ctx): + # If we are defining the source, include the "AS alias" declaration. An + # alias is created for the source if one is not already defined. + if ctx.scope == SCOPE_SOURCE: + if self._alias: + ctx.alias_manager[self] = self._alias + ctx.literal(' AS ').sql(Entity(ctx.alias_manager[self])) + return ctx + + def apply_column(self, ctx): + if self._alias: + ctx.alias_manager[self] = self._alias + return ctx.sql(Entity(ctx.alias_manager[self])) + + +class _HashableSource(object): + def __init__(self, *args, **kwargs): + super(_HashableSource, self).__init__(*args, **kwargs) + self._update_hash() + + @Node.copy + def alias(self, name): + self._alias = name + self._update_hash() + + def _update_hash(self): + self._hash = self._get_hash() + + def _get_hash(self): + return hash((self.__class__, self._path, self._alias)) + + def __hash__(self): + return self._hash + + def __eq__(self, other): + if isinstance(other, _HashableSource): + return self._hash == other._hash + return Expression(self, OP.EQ, other) + + def __ne__(self, other): + if isinstance(other, _HashableSource): + return self._hash != other._hash + return Expression(self, OP.NE, other) + + def 
_e(op): + def inner(self, rhs): + return Expression(self, op, rhs) + return inner + __lt__ = _e(OP.LT) + __le__ = _e(OP.LTE) + __gt__ = _e(OP.GT) + __ge__ = _e(OP.GTE) + + +def __bind_database__(meth): + @wraps(meth) + def inner(self, *args, **kwargs): + result = meth(self, *args, **kwargs) + if self._database: + return result.bind(self._database) + return result + return inner + + +def __join__(join_type=JOIN.INNER, inverted=False): + def method(self, other): + if inverted: + self, other = other, self + return Join(self, other, join_type=join_type) + return method + + +class BaseTable(Source): + __and__ = __join__(JOIN.INNER) + __add__ = __join__(JOIN.LEFT_OUTER) + __sub__ = __join__(JOIN.RIGHT_OUTER) + __or__ = __join__(JOIN.FULL_OUTER) + __mul__ = __join__(JOIN.CROSS) + __rand__ = __join__(JOIN.INNER, inverted=True) + __radd__ = __join__(JOIN.LEFT_OUTER, inverted=True) + __rsub__ = __join__(JOIN.RIGHT_OUTER, inverted=True) + __ror__ = __join__(JOIN.FULL_OUTER, inverted=True) + __rmul__ = __join__(JOIN.CROSS, inverted=True) + + +class _BoundTableContext(_callable_context_manager): + def __init__(self, table, database): + self.table = table + self.database = database + + def __enter__(self): + self._orig_database = self.table._database + self.table.bind(self.database) + if self.table._model is not None: + self.table._model.bind(self.database) + return self.table + + def __exit__(self, exc_type, exc_val, exc_tb): + self.table.bind(self._orig_database) + if self.table._model is not None: + self.table._model.bind(self._orig_database) + + +class Table(_HashableSource, BaseTable): + def __init__(self, name, columns=None, primary_key=None, schema=None, + alias=None, _model=None, _database=None): + self.__name__ = name + self._columns = columns + self._primary_key = primary_key + self._schema = schema + self._path = (schema, name) if schema else (name,) + self._model = _model + self._database = _database + super(Table, self).__init__(alias=alias) + + # Allow tables to 
    @__bind_database__
    def insert(self, insert=None, columns=None, **kwargs):
        """Build an Insert query for this table.

        *insert* may be a mapping of column -> value; keyword arguments are
        merged in, resolved against the table's explicit columns when
        declared (``self``) or the dynamic accessor (``self.c``) otherwise.

        NOTE(review): when both *insert* and kwargs are given, the
        caller-supplied dict is mutated in place — confirm callers do not
        reuse it.
        """
        if kwargs:
            insert = {} if insert is None else insert
            src = self if self._columns else self.c
            for key, value in kwargs.items():
                insert[getattr(src, key)] = value
        return Insert(self, insert=insert, columns=columns)
class CTE(_HashableSource, Source):
    """A named common-table-expression (``WITH name AS (...)``).

    Wraps *query* under *name*; optionally RECURSIVE, with an explicit
    column list and a MATERIALIZED / NOT MATERIALIZED hint.
    """
    def __init__(self, name, query, recursive=False, columns=None,
                 materialized=None):
        self._alias = name
        self._query = query
        self._recursive = recursive
        # None = no hint; True = MATERIALIZED; False = NOT MATERIALIZED.
        self._materialized = materialized
        if columns is not None:
            # Accept plain strings or pre-built Entity nodes.
            columns = [Entity(c) if isinstance(c, basestring) else c
                       for c in columns]
        self._columns = columns
        # The wrapped query must not carry its own CTE list; the outer query
        # owns the WITH clause.
        query._cte_list = ()
        super(CTE, self).__init__(alias=name)

    def select_from(self, *columns):
        """Return a Select of *columns* FROM this CTE, with the CTE attached
        via the WITH clause and bound to the wrapped query's database."""
        if not columns:
            raise ValueError('select_from() must specify one or more columns '
                             'from the CTE to select.')

        query = (Select((self,), columns)
                 .with_cte(self)
                 .bind(self._query._database))
        try:
            # Propagate row-construction behavior if the wrapped query is a
            # model query.
            query = query.objects(self._query.model)
        except AttributeError:
            pass
        return query

    def _get_hash(self):
        # Identity is (class, name, wrapped-query identity).
        return hash((self.__class__, self._alias, id(self._query)))

    def union_all(self, rhs):
        """Combine with *rhs* via UNION ALL, preserving CTE settings."""
        clone = self._query.clone()
        # Fix: propagate the materialization hint, which was previously
        # dropped when combining queries.
        return CTE(self._alias, clone + rhs, self._recursive, self._columns,
                   self._materialized)
    __add__ = union_all

    def union(self, rhs):
        """Combine with *rhs* via UNION, preserving CTE settings."""
        clone = self._query.clone()
        return CTE(self._alias, clone | rhs, self._recursive, self._columns,
                   self._materialized)
    __or__ = union

    def __sql__(self, ctx):
        # Outside the WITH clause, a CTE is referenced simply by name.
        if ctx.scope != SCOPE_CTE:
            return ctx.sql(Entity(self._alias))

        with ctx.push_alias():
            ctx.alias_manager[self] = self._alias
            ctx.sql(Entity(self._alias))

            if self._columns:
                ctx.literal(' ').sql(EnclosedNodeList(self._columns))
            ctx.literal(' AS ')

            if self._materialized:
                ctx.literal('MATERIALIZED ')
            elif self._materialized is False:
                ctx.literal('NOT MATERIALIZED ')

            with ctx.scope_normal(parentheses=True):
                ctx.sql(self._query)
        return ctx
    def __eq__(self, rhs):
        # Comparison against None renders as "IS NULL" rather than "= NULL",
        # which would never match in SQL.
        op = OP.IS if rhs is None else OP.EQ
        return Expression(self, op, rhs)

    def __ne__(self, rhs):
        # Symmetric with __eq__: None renders as "IS NOT NULL".
        # NOTE: these return Expression objects (query fragments), not
        # booleans — column nodes are not usefully equality-comparable in
        # ordinary Python code.
        op = OP.IS_NOT if rhs is None else OP.NE
        return Expression(self, op, rhs)
    def _escape_like_expr(self, s, template):
        """Escape LIKE wildcards in *s* and apply *template* (e.g. '%%%s%%').

        Returns either the plain formatted string, or a NodeList carrying an
        explicit ESCAPE '\\' clause when escaping was required.
        """
        if s.find('_') >= 0 or s.find('%') >= 0 or s.find('\\') >= 0:
            # Backslash is replaced first so the escape sequences added for
            # '_' and '%' are not themselves double-escaped.
            s = s.replace('\\', '\\\\').replace('_', '\\_').replace('%', '\\%')
            return NodeList((template % s, SQL('ESCAPE'), '\\'))
        return template % s

    def contains(self, rhs):
        """Case-insensitive substring match (ILIKE '%rhs%')."""
        if isinstance(rhs, Node):
            # Dynamic right-hand side: build '%' || rhs || '%' in SQL.
            rhs = Expression('%', OP.CONCAT,
                             Expression(rhs, OP.CONCAT, '%'))
        else:
            rhs = self._escape_like_expr(rhs, '%%%s%%')
        return Expression(self, OP.ILIKE, rhs)

    def startswith(self, rhs):
        """Case-insensitive prefix match (ILIKE 'rhs%')."""
        if isinstance(rhs, Node):
            rhs = Expression(rhs, OP.CONCAT, '%')
        else:
            rhs = self._escape_like_expr(rhs, '%s%%')
        return Expression(self, OP.ILIKE, rhs)

    def endswith(self, rhs):
        """Case-insensitive suffix match (ILIKE '%rhs')."""
        if isinstance(rhs, Node):
            rhs = Expression('%', OP.CONCAT, rhs)
        else:
            rhs = self._escape_like_expr(rhs, '%%%s')
        return Expression(self, OP.ILIKE, rhs)
class Alias(WrappedNode):
    """Wrap a node so it renders as ``node AS alias`` when defined in the
    source scope, and simply as the alias name elsewhere."""
    # ``alias_node.c.attr`` yields Entity(alias, attr) for qualified lookups.
    c = _DynamicEntity()

    def __init__(self, node, alias):
        super(Alias, self).__init__(node)
        self._alias = alias

    def __hash__(self):
        return hash(self._alias)

    def alias(self, alias=None):
        # Passing no alias strips the wrapper; otherwise re-alias the same
        # underlying node.
        if alias is None:
            return self.node
        else:
            return Alias(self.node, alias)

    def unalias(self):
        return self.node

    def is_alias(self):
        return True

    def __sql__(self, ctx):
        if ctx.scope == SCOPE_SOURCE:
            # Defining occurrence: emit "<node> AS <alias>".
            return (ctx
                    .sql(self.node)
                    .literal(' AS ')
                    .sql(Entity(self._alias)))
        else:
            # Referencing occurrence: emit the alias name only.
            return ctx.sql(Entity(self._alias))
class Value(ColumnBase):
    """A literal value to be bound as a query parameter.

    With *unpack* true (the default), list/tuple/set values are exploded
    into an enclosed list of individual parameters (for IN clauses).
    NOTE(review): assigning ``self.converter`` shadows the inherited
    ``ColumnBase.converter`` method on instances — apparently intentional,
    confirm before relying on the method form.
    """
    def __init__(self, value, converter=None, unpack=True):
        self.value = value
        self.converter = converter
        self.multi = unpack and isinstance(self.value, multi_types)
        if self.multi:
            self.values = []
            for item in self.value:
                if isinstance(item, Node):
                    # Already a query node; keep as-is.
                    self.values.append(item)
                else:
                    # Each scalar inherits this Value's converter.
                    self.values.append(Value(item, self.converter))

    def __sql__(self, ctx):
        if self.multi:
            # For multi-part values (e.g. lists of IDs).
            return ctx.sql(EnclosedNodeList(self.values))

        return ctx.value(self.value, self.converter)
def Asc(node, collation=None, nulls=None):
    """Shorthand for an ascending Ordering over *node*."""
    return Ordering(node, 'ASC', collation=collation, nulls=nulls)


def Desc(node, collation=None, nulls=None):
    """Shorthand for a descending Ordering over *node*."""
    return Ordering(node, 'DESC', collation=collation, nulls=nulls)
class Entity(ColumnBase):
    """A quoted SQL identifier path, e.g. schema.table.column."""
    def __init__(self, *path):
        # Escape embedded double-quotes per SQL identifier quoting and drop
        # falsy path components (e.g. a None schema).
        self._path = [part.replace('"', '""') for part in path if part]

    def __getattr__(self, attr):
        # Attribute access extends the dotted path: Entity('t').col ->
        # Entity('t', 'col').
        return Entity(*self._path + [attr])

    def get_sort_key(self, ctx):
        return tuple(self._path)

    def __hash__(self):
        return hash((self.__class__.__name__, tuple(self._path)))

    def __sql__(self, ctx):
        # Render using the database's quote characters (default '"').
        return ctx.literal(quote(self._path, ctx.state.quote or '""'))
    def over(self, partition_by=None, order_by=None, start=None, end=None,
             frame_type=None, window=None, exclude=None):
        """Turn this function call into a window function: ``fn(...) OVER
        (...)``.

        Pass either an existing *window* (referenced by its alias), or the
        individual window parts, which build an inline window definition.
        """
        # Allow over(window) positionally: a Window given as partition_by is
        # treated as the named window.
        if isinstance(partition_by, Window) and window is None:
            window = partition_by

        if window is not None:
            # Reference a previously-declared named window by alias.
            node = WindowAlias(window)
        else:
            # Build an anonymous inline window specification.
            node = Window(partition_by=partition_by, order_by=order_by,
                          start=start, end=end, frame_type=frame_type,
                          exclude=exclude, _inline=True)
        return NodeList((self, SQL('OVER'), node))
    def __init__(self, partition_by=None, order_by=None, start=None, end=None,
                 frame_type=None, extends=None, exclude=None, alias=None,
                 _inline=False):
        """Define a window specification.

        *start*/*end* are frame boundaries (coerced to SQL nodes);
        *frame_type* is one of GROUPS/RANGE/ROWS; *extends* names a base
        window; *exclude* is a frame-exclusion clause; *_inline* marks an
        anonymous window embedded directly in an OVER clause.
        """
        super(Window, self).__init__()
        # Accept plain strings/values for frame bounds and wrap them as SQL.
        if start is not None and not isinstance(start, SQL):
            start = SQL(start)
        if end is not None and not isinstance(end, SQL):
            end = SQL(end)

        self.partition_by = ensure_tuple(partition_by)
        self.order_by = ensure_tuple(order_by)
        self.start = start
        self.end = end
        # An end bound without a start is not expressible in SQL frames.
        if self.start is None and self.end is not None:
            raise ValueError('Cannot specify WINDOW end without start.')
        # Named windows default to the alias 'w'.
        self._alias = alias or 'w'
        self._inline = _inline
        self.frame_type = frame_type
        self._extends = extends
        self._exclude = exclude
class WindowAlias(Node):
    """Reference a named Window by its alias inside an OVER clause."""
    def __init__(self, window):
        self.window = window

    def alias(self, window_alias):
        # Renames the underlying window in place and returns self for
        # chaining.
        self.window._alias = window_alias
        return self

    def __sql__(self, ctx):
        # Emit only the alias name; 'w' is the library default.
        return ctx.literal(self.window._alias or 'w')
def Case(predicate, expression_tuples, default=None):
    """Build a SQL CASE expression.

    *predicate* (optional) gives the simple-CASE operand;
    *expression_tuples* is an iterable of (condition, result) pairs;
    *default* supplies the ELSE branch. Returns a NodeList.
    """
    parts = [SQL('CASE')]
    if predicate is not None:
        parts.append(predicate)
    for condition, result in expression_tuples:
        parts += [SQL('WHEN'), condition, SQL('THEN'), result]
    if default is not None:
        parts += [SQL('ELSE'), default]
    parts.append(SQL('END'))
    return NodeList(parts)
def qualify_names(node):
    """Recursively wrap column-like objects in a node hierarchy so they are
    rendered with fully-qualified (source.column) names."""
    if isinstance(node, Expression):
        # Rebuild the expression with both operands qualified, preserving
        # operator and flatness.
        lhs = qualify_names(node.lhs)
        rhs = qualify_names(node.rhs)
        return node.__class__(lhs, node.op, rhs, node.flat)
    if isinstance(node, ColumnBase):
        return QualifiedNames(node)
    return node
def database_required(method):
    """Decorator for query methods that need a database connection.

    The wrapped method may be called with an explicit database as its first
    argument; otherwise the query's bound database is used. Raises
    InterfaceError when neither is available.
    """
    @wraps(method)
    def wrapper(self, database=None, *args, **kwargs):
        if database is None:
            database = self._database
        if not database:
            raise InterfaceError('Query must be bound to a database in order '
                                 'to call "%s".' % method.__name__)
        return method(self, database, *args, **kwargs)
    return wrapper
class RawQuery(BaseQuery):
    """A query expressed as a raw SQL string plus parameters."""
    def __init__(self, sql=None, params=None, **kwargs):
        super(RawQuery, self).__init__(**kwargs)
        self._sql = sql
        self._params = params

    def __sql__(self, ctx):
        # The SQL is emitted verbatim; parameters are recorded as values but
        # no additional placeholders are written (the string already
        # contains them).
        ctx.literal(self._sql)
        if self._params:
            for param in self._params:
                ctx.value(param, add_param=False)
        return ctx

    def _execute(self, database):
        # Execute once and memoize the cursor wrapper; repeat calls return
        # the cached wrapper.
        if self._cursor_wrapper is None:
            cursor = database.execute(self)
            self._cursor_wrapper = self._get_cursor_wrapper(cursor)
        return self._cursor_wrapper
@Node.copy + def where(self, *expressions): + if self._where is not None: + expressions = (self._where,) + expressions + self._where = reduce(operator.and_, expressions) + + @Node.copy + def orwhere(self, *expressions): + if self._where is not None: + expressions = (self._where,) + expressions + self._where = reduce(operator.or_, expressions) + + @Node.copy + def order_by(self, *values): + self._order_by = values + + @Node.copy + def order_by_extend(self, *values): + self._order_by = ((self._order_by or ()) + values) or None + + @Node.copy + def limit(self, value=None): + self._limit = value + + @Node.copy + def offset(self, value=None): + self._offset = value + + @Node.copy + def paginate(self, page, paginate_by=20): + if page > 0: + page -= 1 + self._limit = paginate_by + self._offset = page * paginate_by + + def _apply_ordering(self, ctx): + if self._order_by: + (ctx + .literal(' ORDER BY ') + .sql(CommaNodeList(self._order_by))) + if self._limit is not None or (self._offset is not None and + ctx.state.limit_max): + limit = ctx.state.limit_max if self._limit is None else self._limit + ctx.literal(' LIMIT ').sql(limit) + if self._offset is not None: + ctx.literal(' OFFSET ').sql(self._offset) + return ctx + + def __sql__(self, ctx): + if self._cte_list: + # The CTE scope is only used at the very beginning of the query, + # when we are describing the various CTEs we will be using. + recursive = any(cte._recursive for cte in self._cte_list) + + # Explicitly disable the "subquery" flag here, so as to avoid + # unnecessary parentheses around subsequent selects. 
+ with ctx.scope_cte(subquery=False): + (ctx + .literal('WITH RECURSIVE ' if recursive else 'WITH ') + .sql(CommaNodeList(self._cte_list)) + .literal(' ')) + return ctx + + +def __compound_select__(operation, inverted=False): + @__bind_database__ + def method(self, other): + if inverted: + self, other = other, self + return CompoundSelectQuery(self, operation, other) + return method + + +class SelectQuery(Query): + union_all = __add__ = __compound_select__('UNION ALL') + union = __or__ = __compound_select__('UNION') + intersect = __and__ = __compound_select__('INTERSECT') + except_ = __sub__ = __compound_select__('EXCEPT') + __radd__ = __compound_select__('UNION ALL', inverted=True) + __ror__ = __compound_select__('UNION', inverted=True) + __rand__ = __compound_select__('INTERSECT', inverted=True) + __rsub__ = __compound_select__('EXCEPT', inverted=True) + + def select_from(self, *columns): + if not columns: + raise ValueError('select_from() must specify one or more columns.') + + query = (Select((self,), columns) + .bind(self._database)) + if getattr(self, 'model', None) is not None: + # Bind to the sub-select's model type, if defined. 
+ query = query.objects(self.model) + return query + + +class SelectBase(_HashableSource, Source, SelectQuery): + def _get_hash(self): + return hash((self.__class__, self._alias or id(self))) + + def _execute(self, database): + if self._cursor_wrapper is None: + cursor = database.execute(self) + self._cursor_wrapper = self._get_cursor_wrapper(cursor) + return self._cursor_wrapper + + @database_required + def peek(self, database, n=1): + rows = self.execute(database)[:n] + if rows: + return rows[0] if n == 1 else rows + + @database_required + def first(self, database, n=1): + if self._limit != n: + self._limit = n + self._cursor_wrapper = None + return self.peek(database, n=n) + + @database_required + def scalar(self, database, as_tuple=False): + row = self.tuples().peek(database) + return row[0] if row and not as_tuple else row + + @database_required + def count(self, database, clear_limit=False): + clone = self.order_by().alias('_wrapped') + if clear_limit: + clone._limit = clone._offset = None + try: + if clone._having is None and clone._group_by is None and \ + clone._windows is None and clone._distinct is None and \ + clone._simple_distinct is not True: + clone = clone.select(SQL('1')) + except AttributeError: + pass + return Select([clone], [fn.COUNT(SQL('1'))]).scalar(database) + + @database_required + def exists(self, database): + clone = self.columns(SQL('1')) + clone._limit = 1 + clone._offset = None + return bool(clone.scalar()) + + @database_required + def get(self, database): + self._cursor_wrapper = None + try: + return self.execute(database)[0] + except IndexError: + pass + + +# QUERY IMPLEMENTATIONS. 
+ + +class CompoundSelectQuery(SelectBase): + def __init__(self, lhs, op, rhs): + super(CompoundSelectQuery, self).__init__() + self.lhs = lhs + self.op = op + self.rhs = rhs + + @property + def _returning(self): + return self.lhs._returning + + @database_required + def exists(self, database): + query = Select((self.limit(1),), (SQL('1'),)).bind(database) + return bool(query.scalar()) + + def _get_query_key(self): + return (self.lhs.get_query_key(), self.rhs.get_query_key()) + + def _wrap_parens(self, ctx, subq): + csq_setting = ctx.state.compound_select_parentheses + + if not csq_setting or csq_setting == CSQ_PARENTHESES_NEVER: + return False + elif csq_setting == CSQ_PARENTHESES_ALWAYS: + return True + elif csq_setting == CSQ_PARENTHESES_UNNESTED: + if ctx.state.in_expr or ctx.state.in_function: + # If this compound select query is being used inside an + # expression, e.g., an IN or EXISTS(). + return False + + # If the query on the left or right is itself a compound select + # query, then we do not apply parentheses. However, if it is a + # regular SELECT query, we will apply parentheses. + return not isinstance(subq, CompoundSelectQuery) + + def __sql__(self, ctx): + if ctx.scope == SCOPE_COLUMN: + return self.apply_column(ctx) + + # Call parent method to handle any CTEs. + super(CompoundSelectQuery, self).__sql__(ctx) + + outer_parens = ctx.subquery or (ctx.scope == SCOPE_SOURCE) + with ctx(parentheses=outer_parens): + # Should the left-hand query be wrapped in parentheses? + lhs_parens = self._wrap_parens(ctx, self.lhs) + with ctx.scope_normal(parentheses=lhs_parens, subquery=False): + ctx.sql(self.lhs) + ctx.literal(' %s ' % self.op) + with ctx.push_alias(): + # Should the right-hand query be wrapped in parentheses? + rhs_parens = self._wrap_parens(ctx, self.rhs) + with ctx.scope_normal(parentheses=rhs_parens, subquery=False): + ctx.sql(self.rhs) + + # Apply ORDER BY, LIMIT, OFFSET. We use the "values" scope so that + # entity names are not fully-qualified. 
This is a bit of a hack, as + # we're relying on the logic in Column.__sql__() to not fully + # qualify column names. + with ctx.scope_values(): + self._apply_ordering(ctx) + + return self.apply_alias(ctx) + + +class Select(SelectBase): + def __init__(self, from_list=None, columns=None, group_by=None, + having=None, distinct=None, windows=None, for_update=None, + for_update_of=None, nowait=None, lateral=None, **kwargs): + super(Select, self).__init__(**kwargs) + self._from_list = (list(from_list) if isinstance(from_list, tuple) + else from_list) or [] + self._returning = columns + self._group_by = group_by + self._having = having + self._windows = None + self._for_update = for_update # XXX: consider reorganizing. + self._for_update_of = for_update_of + self._for_update_nowait = nowait + self._lateral = lateral + + self._distinct = self._simple_distinct = None + if distinct: + if isinstance(distinct, bool): + self._simple_distinct = distinct + else: + self._distinct = distinct + + self._cursor_wrapper = None + + def clone(self): + clone = super(Select, self).clone() + if clone._from_list: + clone._from_list = list(clone._from_list) + return clone + + @Node.copy + def columns(self, *columns, **kwargs): + self._returning = columns + select = columns + + @Node.copy + def select_extend(self, *columns): + self._returning = tuple(self._returning) + columns + + @Node.copy + def from_(self, *sources): + self._from_list = list(sources) + + @Node.copy + def join(self, dest, join_type=JOIN.INNER, on=None): + if not self._from_list: + raise ValueError('No sources to join on.') + item = self._from_list.pop() + self._from_list.append(Join(item, dest, join_type, on)) + + @Node.copy + def group_by(self, *columns): + grouping = [] + for column in columns: + if isinstance(column, Table): + if not column._columns: + raise ValueError('Cannot pass a table to group_by() that ' + 'does not have columns explicitly ' + 'declared.') + grouping.extend([getattr(column, col_name) + for col_name 
in column._columns]) + else: + grouping.append(column) + self._group_by = grouping + + def group_by_extend(self, *values): + """@Node.copy used from group_by() call""" + group_by = tuple(self._group_by or ()) + values + return self.group_by(*group_by) + + @Node.copy + def having(self, *expressions): + if self._having is not None: + expressions = (self._having,) + expressions + self._having = reduce(operator.and_, expressions) + + @Node.copy + def distinct(self, *columns): + if len(columns) == 1 and (columns[0] is True or columns[0] is False): + self._simple_distinct = columns[0] + else: + self._simple_distinct = False + self._distinct = columns + + @Node.copy + def window(self, *windows): + self._windows = windows if windows else None + + @Node.copy + def for_update(self, for_update=True, of=None, nowait=None): + if not for_update and (of is not None or nowait): + for_update = True + self._for_update = for_update + self._for_update_of = of + self._for_update_nowait = nowait + + @Node.copy + def lateral(self, lateral=True): + self._lateral = lateral + + def _get_query_key(self): + return self._alias + + def __sql_selection__(self, ctx, is_subquery=False): + return ctx.sql(CommaNodeList(self._returning)) + + def __sql__(self, ctx): + if ctx.scope == SCOPE_COLUMN: + return self.apply_column(ctx) + + if self._lateral and ctx.scope == SCOPE_SOURCE: + ctx.literal('LATERAL ') + + is_subquery = ctx.subquery + state = { + 'converter': None, + 'in_function': False, + 'parentheses': is_subquery or (ctx.scope == SCOPE_SOURCE), + 'subquery': True, + } + if ctx.state.in_function and ctx.state.function_arg_count == 1: + state['parentheses'] = False + + with ctx.scope_normal(**state): + # Defer calling parent SQL until here. This ensures that any CTEs + # for this query will be properly nested if this query is a + # sub-select or is used in an expression. See GH#1809 for example. 
+ super(Select, self).__sql__(ctx) + + ctx.literal('SELECT ') + if self._simple_distinct or self._distinct is not None: + ctx.literal('DISTINCT ') + if self._distinct: + (ctx + .literal('ON ') + .sql(EnclosedNodeList(self._distinct)) + .literal(' ')) + + with ctx.scope_source(): + ctx = self.__sql_selection__(ctx, is_subquery) + + if self._from_list: + with ctx.scope_source(parentheses=False): + ctx.literal(' FROM ').sql(CommaNodeList(self._from_list)) + + if self._where is not None: + ctx.literal(' WHERE ').sql(self._where) + + if self._group_by: + ctx.literal(' GROUP BY ').sql(CommaNodeList(self._group_by)) + + if self._having is not None: + ctx.literal(' HAVING ').sql(self._having) + + if self._windows is not None: + ctx.literal(' WINDOW ') + ctx.sql(CommaNodeList(self._windows)) + + # Apply ORDER BY, LIMIT, OFFSET. + self._apply_ordering(ctx) + + if self._for_update: + if not ctx.state.for_update: + raise ValueError('FOR UPDATE specified but not supported ' + 'by database.') + ctx.literal(' ') + ctx.sql(ForUpdate(self._for_update, self._for_update_of, + self._for_update_nowait)) + + # If the subquery is inside a function -or- we are evaluating a + # subquery on either side of an expression w/o an explicit alias, do + # not generate an alias + AS clause. 
+ if ctx.state.in_function or (ctx.state.in_expr and + self._alias is None): + return ctx + + return self.apply_alias(ctx) + + +class _WriteQuery(Query): + def __init__(self, table, returning=None, **kwargs): + self.table = table + self._returning = returning + self._return_cursor = True if returning else False + super(_WriteQuery, self).__init__(**kwargs) + + @Node.copy + def returning(self, *returning): + self._returning = returning + self._return_cursor = True if returning else False + + def apply_returning(self, ctx): + if self._returning: + with ctx.scope_source(): + ctx.literal(' RETURNING ').sql(CommaNodeList(self._returning)) + return ctx + + def _execute(self, database): + if self._returning: + cursor = self.execute_returning(database) + else: + cursor = database.execute(self) + return self.handle_result(database, cursor) + + def execute_returning(self, database): + if self._cursor_wrapper is None: + cursor = database.execute(self) + self._cursor_wrapper = self._get_cursor_wrapper(cursor) + return self._cursor_wrapper + + def handle_result(self, database, cursor): + if self._return_cursor: + return cursor + return database.rows_affected(cursor) + + def _set_table_alias(self, ctx): + ctx.alias_manager[self.table] = self.table.__name__ + + def __sql__(self, ctx): + super(_WriteQuery, self).__sql__(ctx) + # We explicitly set the table alias to the table's name, which ensures + # that if a sub-select references a column on the outer table, we won't + # assign it a new alias (e.g. t2) but will refer to it as table.column. 
+ self._set_table_alias(ctx) + return ctx + + +class Update(_WriteQuery): + def __init__(self, table, update=None, **kwargs): + super(Update, self).__init__(table, **kwargs) + self._update = update + self._from = None + + @Node.copy + def from_(self, *sources): + self._from = sources + + def __sql__(self, ctx): + super(Update, self).__sql__(ctx) + + with ctx.scope_values(subquery=True): + ctx.literal('UPDATE ') + + expressions = [] + for k, v in sorted(self._update.items(), key=ctx.column_sort_key): + if not isinstance(v, Node): + if isinstance(k, Field): + v = k.to_value(v) + else: + v = Value(v, unpack=False) + elif isinstance(v, Model) and isinstance(k, ForeignKeyField): + # NB: we want to ensure that when passed a model instance + # in the context of a foreign-key, we apply the fk-specific + # adaptation of the model. + v = k.to_value(v) + + if not isinstance(v, Value): + v = qualify_names(v) + + expressions.append(NodeList((k, SQL('='), v))) + + (ctx + .sql(self.table) + .literal(' SET ') + .sql(CommaNodeList(expressions))) + + if self._from: + with ctx.scope_source(parentheses=False): + ctx.literal(' FROM ').sql(CommaNodeList(self._from)) + + if self._where: + with ctx.scope_normal(): + ctx.literal(' WHERE ').sql(self._where) + self._apply_ordering(ctx) + return self.apply_returning(ctx) + + +class Insert(_WriteQuery): + SIMPLE = 0 + QUERY = 1 + MULTI = 2 + class DefaultValuesException(Exception): pass + + def __init__(self, table, insert=None, columns=None, on_conflict=None, + **kwargs): + super(Insert, self).__init__(table, **kwargs) + self._insert = insert + self._columns = columns + self._on_conflict = on_conflict + self._query_type = None + + def where(self, *expressions): + raise NotImplementedError('INSERT queries cannot have a WHERE clause.') + + @Node.copy + def on_conflict_ignore(self, ignore=True): + self._on_conflict = OnConflict('IGNORE') if ignore else None + + @Node.copy + def on_conflict_replace(self, replace=True): + self._on_conflict = 
OnConflict('REPLACE') if replace else None + + @Node.copy + def on_conflict(self, *args, **kwargs): + self._on_conflict = (OnConflict(*args, **kwargs) if (args or kwargs) + else None) + + def _simple_insert(self, ctx): + if not self._insert: + raise self.DefaultValuesException('Error: no data to insert.') + return self._generate_insert((self._insert,), ctx) + + def get_default_data(self): + return {} + + def get_default_columns(self): + if self.table._columns: + return [getattr(self.table, col) for col in self.table._columns + if col != self.table._primary_key] + + def _generate_insert(self, insert, ctx): + rows_iter = iter(insert) + columns = self._columns + + # Load and organize column defaults (if provided). + defaults = self.get_default_data() + + # First figure out what columns are being inserted (if they weren't + # specified explicitly). Resulting columns are normalized and ordered. + if not columns: + try: + row = next(rows_iter) + except StopIteration: + raise self.DefaultValuesException('Error: no rows to insert.') + + if not isinstance(row, Mapping): + columns = self.get_default_columns() + if columns is None: + raise ValueError('Bulk insert must specify columns.') + else: + # Infer column names from the dict of data being inserted. + accum = [] + for column in row: + if isinstance(column, basestring): + column = getattr(self.table, column) + accum.append(column) + + # Add any columns present in the default data that are not + # accounted for by the dictionary of row data. 
+ column_set = set(accum) + for col in (set(defaults) - column_set): + accum.append(col) + + columns = sorted(accum, key=lambda obj: obj.get_sort_key(ctx)) + rows_iter = itertools.chain(iter((row,)), rows_iter) + else: + clean_columns = [] + seen = set() + for column in columns: + if isinstance(column, basestring): + column_obj = getattr(self.table, column) + else: + column_obj = column + clean_columns.append(column_obj) + seen.add(column_obj) + + columns = clean_columns + for col in sorted(defaults, key=lambda obj: obj.get_sort_key(ctx)): + if col not in seen: + columns.append(col) + + fk_fields = set() + nullable_columns = set() + value_lookups = {} + for column in columns: + lookups = [column, column.name] + if isinstance(column, Field): + if column.name != column.column_name: + lookups.append(column.column_name) + if column.null: + nullable_columns.add(column) + if isinstance(column, ForeignKeyField): + fk_fields.add(column) + value_lookups[column] = lookups + + ctx.sql(EnclosedNodeList(columns)).literal(' VALUES ') + columns_converters = [ + (column, column.db_value if isinstance(column, Field) else None) + for column in columns] + + all_values = [] + for row in rows_iter: + values = [] + is_dict = isinstance(row, Mapping) + for i, (column, converter) in enumerate(columns_converters): + try: + if is_dict: + # The logic is a bit convoluted, but in order to be + # flexible in what we accept (dict keyed by + # column/field, field name, or underlying column name), + # we try accessing the row data dict using each + # possible key. If no match is found, throw an error. + for lookup in value_lookups[column]: + try: + val = row[lookup] + except KeyError: pass + else: break + else: + raise KeyError + else: + val = row[i] + except (KeyError, IndexError): + if column in defaults: + val = defaults[column] + if callable_(val): + val = val() + elif column in nullable_columns: + val = None + else: + raise ValueError('Missing value for %s.' 
% column.name) + + if not isinstance(val, Node) or (isinstance(val, Model) and + column in fk_fields): + val = Value(val, converter=converter, unpack=False) + values.append(val) + + all_values.append(EnclosedNodeList(values)) + + if not all_values: + raise self.DefaultValuesException('Error: no data to insert.') + + with ctx.scope_values(subquery=True): + return ctx.sql(CommaNodeList(all_values)) + + def _query_insert(self, ctx): + return (ctx + .sql(EnclosedNodeList(self._columns)) + .literal(' ') + .sql(self._insert)) + + def _default_values(self, ctx): + if not self._database: + return ctx.literal('DEFAULT VALUES') + return self._database.default_values_insert(ctx) + + def __sql__(self, ctx): + super(Insert, self).__sql__(ctx) + with ctx.scope_values(): + stmt = None + if self._on_conflict is not None: + stmt = self._on_conflict.get_conflict_statement(ctx, self) + + (ctx + .sql(stmt or SQL('INSERT')) + .literal(' INTO ') + .sql(self.table) + .literal(' ')) + + if isinstance(self._insert, Mapping) and not self._columns: + try: + self._simple_insert(ctx) + except self.DefaultValuesException: + self._default_values(ctx) + self._query_type = Insert.SIMPLE + elif isinstance(self._insert, (SelectQuery, SQL)): + self._query_insert(ctx) + self._query_type = Insert.QUERY + else: + self._generate_insert(self._insert, ctx) + self._query_type = Insert.MULTI + + if self._on_conflict is not None: + update = self._on_conflict.get_conflict_update(ctx, self) + if update is not None: + ctx.literal(' ').sql(update) + + return self.apply_returning(ctx) + + def _execute(self, database): + if self._returning is None and database.returning_clause \ + and self.table._primary_key: + self._returning = (self.table._primary_key,) + try: + return super(Insert, self)._execute(database) + except self.DefaultValuesException: + pass + + def handle_result(self, database, cursor): + if self._return_cursor: + return cursor + if self._query_type != Insert.SIMPLE and not self._returning: + return 
database.rows_affected(cursor) + return database.last_insert_id(cursor, self._query_type) + + +class Delete(_WriteQuery): + def __sql__(self, ctx): + super(Delete, self).__sql__(ctx) + + with ctx.scope_values(subquery=True): + ctx.literal('DELETE FROM ').sql(self.table) + if self._where is not None: + with ctx.scope_normal(): + ctx.literal(' WHERE ').sql(self._where) + + self._apply_ordering(ctx) + return self.apply_returning(ctx) + + +class Index(Node): + def __init__(self, name, table, expressions, unique=False, safe=False, + where=None, using=None): + self._name = name + self._table = Entity(table) if not isinstance(table, Table) else table + self._expressions = expressions + self._where = where + self._unique = unique + self._safe = safe + self._using = using + + @Node.copy + def safe(self, _safe=True): + self._safe = _safe + + @Node.copy + def where(self, *expressions): + if self._where is not None: + expressions = (self._where,) + expressions + self._where = reduce(operator.and_, expressions) + + @Node.copy + def using(self, _using=None): + self._using = _using + + def __sql__(self, ctx): + statement = 'CREATE UNIQUE INDEX ' if self._unique else 'CREATE INDEX ' + with ctx.scope_values(subquery=True): + ctx.literal(statement) + if self._safe: + ctx.literal('IF NOT EXISTS ') + + # Sqlite uses CREATE INDEX . ON , whereas most + # others use: CREATE INDEX ON .
. + if ctx.state.index_schema_prefix and \ + isinstance(self._table, Table) and self._table._schema: + index_name = Entity(self._table._schema, self._name) + table_name = Entity(self._table.__name__) + else: + index_name = Entity(self._name) + table_name = self._table + + ctx.sql(index_name) + if self._using is not None and \ + ctx.state.index_using_precedes_table: + ctx.literal(' USING %s' % self._using) # MySQL style. + + (ctx + .literal(' ON ') + .sql(table_name) + .literal(' ')) + + if self._using is not None and not \ + ctx.state.index_using_precedes_table: + ctx.literal('USING %s ' % self._using) # Postgres/default. + + ctx.sql(EnclosedNodeList([ + SQL(expr) if isinstance(expr, basestring) else expr + for expr in self._expressions])) + if self._where is not None: + ctx.literal(' WHERE ').sql(self._where) + + return ctx + + +class ModelIndex(Index): + def __init__(self, model, fields, unique=False, safe=True, where=None, + using=None, name=None): + self._model = model + if name is None: + name = self._generate_name_from_fields(model, fields) + if using is None: + for field in fields: + if isinstance(field, Field) and hasattr(field, 'index_type'): + using = field.index_type + super(ModelIndex, self).__init__( + name=name, + table=model._meta.table, + expressions=fields, + unique=unique, + safe=safe, + where=where, + using=using) + + def _generate_name_from_fields(self, model, fields): + accum = [] + for field in fields: + if isinstance(field, basestring): + accum.append(field.split()[0]) + else: + if isinstance(field, Node) and not isinstance(field, Field): + field = field.unwrap() + if isinstance(field, Field): + accum.append(field.column_name) + + if not accum: + raise ValueError('Unable to generate a name for the index, please ' + 'explicitly specify a name.') + + clean_field_names = re.sub(r'[^\w]+', '', '_'.join(accum)) + meta = model._meta + prefix = meta.name if meta.legacy_table_names else meta.table_name + return 
_truncate_constraint_name('_'.join((prefix, clean_field_names))) + + +def _truncate_constraint_name(constraint, maxlen=64): + if len(constraint) > maxlen: + name_hash = hashlib.md5(constraint.encode('utf-8')).hexdigest() + constraint = '%s_%s' % (constraint[:(maxlen - 8)], name_hash[:7]) + return constraint + + +# DB-API 2.0 EXCEPTIONS. + + +class PeeweeException(Exception): + def __init__(self, *args): + if args and isinstance(args[0], Exception): + self.orig, args = args[0], args[1:] + super(PeeweeException, self).__init__(*args) +class ImproperlyConfigured(PeeweeException): pass +class DatabaseError(PeeweeException): pass +class DataError(DatabaseError): pass +class IntegrityError(DatabaseError): pass +class InterfaceError(PeeweeException): pass +class InternalError(DatabaseError): pass +class NotSupportedError(DatabaseError): pass +class OperationalError(DatabaseError): pass +class ProgrammingError(DatabaseError): pass + + +class ExceptionWrapper(object): + __slots__ = ('exceptions',) + def __init__(self, exceptions): + self.exceptions = exceptions + def __enter__(self): pass + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is None: + return + # psycopg2.8 shits out a million cute error types. Try to catch em all. 
+ if pg_errors is not None and exc_type.__name__ not in self.exceptions \ + and issubclass(exc_type, pg_errors.Error): + exc_type = exc_type.__bases__[0] + if exc_type.__name__ in self.exceptions: + new_type = self.exceptions[exc_type.__name__] + exc_args = exc_value.args + reraise(new_type, new_type(exc_value, *exc_args), traceback) + + +EXCEPTIONS = { + 'ConstraintError': IntegrityError, + 'DatabaseError': DatabaseError, + 'DataError': DataError, + 'IntegrityError': IntegrityError, + 'InterfaceError': InterfaceError, + 'InternalError': InternalError, + 'NotSupportedError': NotSupportedError, + 'OperationalError': OperationalError, + 'ProgrammingError': ProgrammingError, + 'TransactionRollbackError': OperationalError} + +__exception_wrapper__ = ExceptionWrapper(EXCEPTIONS) + + +# DATABASE INTERFACE AND CONNECTION MANAGEMENT. + + +IndexMetadata = collections.namedtuple( + 'IndexMetadata', + ('name', 'sql', 'columns', 'unique', 'table')) +ColumnMetadata = collections.namedtuple( + 'ColumnMetadata', + ('name', 'data_type', 'null', 'primary_key', 'table', 'default')) +ForeignKeyMetadata = collections.namedtuple( + 'ForeignKeyMetadata', + ('column', 'dest_table', 'dest_column', 'table')) +ViewMetadata = collections.namedtuple('ViewMetadata', ('name', 'sql')) + + +class _ConnectionState(object): + def __init__(self, **kwargs): + super(_ConnectionState, self).__init__(**kwargs) + self.reset() + + def reset(self): + self.closed = True + self.conn = None + self.ctx = [] + self.transactions = [] + + def set_connection(self, conn): + self.conn = conn + self.closed = False + self.ctx = [] + self.transactions = [] + + +class _ConnectionLocal(_ConnectionState, threading.local): pass +class _NoopLock(object): + __slots__ = () + def __enter__(self): return self + def __exit__(self, exc_type, exc_val, exc_tb): pass + + +class ConnectionContext(_callable_context_manager): + __slots__ = ('db',) + def __init__(self, db): self.db = db + def __enter__(self): + if self.db.is_closed(): + 
self.db.connect() + def __exit__(self, exc_type, exc_val, exc_tb): self.db.close() + + +class Database(_callable_context_manager): + context_class = Context + field_types = {} + operations = {} + param = '?' + quote = '""' + server_version = None + + # Feature toggles. + commit_select = False + compound_select_parentheses = CSQ_PARENTHESES_NEVER + for_update = False + index_schema_prefix = False + index_using_precedes_table = False + limit_max = None + nulls_ordering = False + returning_clause = False + safe_create_index = True + safe_drop_index = True + sequences = False + truncate_table = True + + def __init__(self, database, thread_safe=True, autorollback=False, + field_types=None, operations=None, autocommit=None, + autoconnect=True, **kwargs): + self._field_types = merge_dict(FIELD, self.field_types) + self._operations = merge_dict(OP, self.operations) + if field_types: + self._field_types.update(field_types) + if operations: + self._operations.update(operations) + + self.autoconnect = autoconnect + self.autorollback = autorollback + self.thread_safe = thread_safe + if thread_safe: + self._state = _ConnectionLocal() + self._lock = threading.RLock() + else: + self._state = _ConnectionState() + self._lock = _NoopLock() + + if autocommit is not None: + __deprecated__('Peewee no longer uses the "autocommit" option, as ' + 'the semantics now require it to always be True. 
' + 'Because some database-drivers also use the ' + '"autocommit" parameter, you are receiving a ' + 'warning so you may update your code and remove ' + 'the parameter, as in the future, specifying ' + 'autocommit could impact the behavior of the ' + 'database driver you are using.') + + self.connect_params = {} + self.init(database, **kwargs) + + def init(self, database, **kwargs): + if not self.is_closed(): + self.close() + self.database = database + self.connect_params.update(kwargs) + self.deferred = not bool(database) + + def __enter__(self): + if self.is_closed(): + self.connect() + ctx = self.atomic() + self._state.ctx.append(ctx) + ctx.__enter__() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + ctx = self._state.ctx.pop() + try: + ctx.__exit__(exc_type, exc_val, exc_tb) + finally: + if not self._state.ctx: + self.close() + + def connection_context(self): + return ConnectionContext(self) + + def _connect(self): + raise NotImplementedError + + def connect(self, reuse_if_open=False): + with self._lock: + if self.deferred: + raise InterfaceError('Error, database must be initialized ' + 'before opening a connection.') + if not self._state.closed: + if reuse_if_open: + return False + raise OperationalError('Connection already opened.') + + self._state.reset() + with __exception_wrapper__: + self._state.set_connection(self._connect()) + if self.server_version is None: + self._set_server_version(self._state.conn) + self._initialize_connection(self._state.conn) + return True + + def _initialize_connection(self, conn): + pass + + def _set_server_version(self, conn): + self.server_version = 0 + + def close(self): + with self._lock: + if self.deferred: + raise InterfaceError('Error, database must be initialized ' + 'before opening a connection.') + if self.in_transaction(): + raise OperationalError('Attempting to close database while ' + 'transaction is open.') + is_open = not self._state.closed + try: + if is_open: + with __exception_wrapper__: + 
self._close(self._state.conn) + finally: + self._state.reset() + return is_open + + def _close(self, conn): + conn.close() + + def is_closed(self): + return self._state.closed + + def is_connection_usable(self): + return not self._state.closed + + def connection(self): + if self.is_closed(): + self.connect() + return self._state.conn + + def cursor(self, commit=None): + if self.is_closed(): + if self.autoconnect: + self.connect() + else: + raise InterfaceError('Error, database connection not opened.') + return self._state.conn.cursor() + + def execute_sql(self, sql, params=None, commit=SENTINEL): + logger.debug((sql, params)) + if commit is SENTINEL: + if self.in_transaction(): + commit = False + elif self.commit_select: + commit = True + else: + commit = not sql[:6].lower().startswith('select') + + with __exception_wrapper__: + cursor = self.cursor(commit) + try: + cursor.execute(sql, params or ()) + except Exception: + if self.autorollback and not self.in_transaction(): + self.rollback() + raise + else: + if commit and not self.in_transaction(): + self.commit() + return cursor + + def execute(self, query, commit=SENTINEL, **context_options): + ctx = self.get_sql_context(**context_options) + sql, params = ctx.sql(query).query() + return self.execute_sql(sql, params, commit=commit) + + def get_context_options(self): + return { + 'field_types': self._field_types, + 'operations': self._operations, + 'param': self.param, + 'quote': self.quote, + 'compound_select_parentheses': self.compound_select_parentheses, + 'conflict_statement': self.conflict_statement, + 'conflict_update': self.conflict_update, + 'for_update': self.for_update, + 'index_schema_prefix': self.index_schema_prefix, + 'index_using_precedes_table': self.index_using_precedes_table, + 'limit_max': self.limit_max, + 'nulls_ordering': self.nulls_ordering, + } + + def get_sql_context(self, **context_options): + context = self.get_context_options() + if context_options: + context.update(context_options) + 
return self.context_class(**context) + + def conflict_statement(self, on_conflict, query): + raise NotImplementedError + + def conflict_update(self, on_conflict, query): + raise NotImplementedError + + def _build_on_conflict_update(self, on_conflict, query): + if on_conflict._conflict_target: + stmt = SQL('ON CONFLICT') + target = EnclosedNodeList([ + Entity(col) if isinstance(col, basestring) else col + for col in on_conflict._conflict_target]) + if on_conflict._conflict_where is not None: + target = NodeList([target, SQL('WHERE'), + on_conflict._conflict_where]) + else: + stmt = SQL('ON CONFLICT ON CONSTRAINT') + target = on_conflict._conflict_constraint + if isinstance(target, basestring): + target = Entity(target) + + updates = [] + if on_conflict._preserve: + for column in on_conflict._preserve: + excluded = NodeList((SQL('EXCLUDED'), ensure_entity(column)), + glue='.') + expression = NodeList((ensure_entity(column), SQL('='), + excluded)) + updates.append(expression) + + if on_conflict._update: + for k, v in on_conflict._update.items(): + if not isinstance(v, Node): + # Attempt to resolve string field-names to their respective + # field object, to apply data-type conversions. 
+ if isinstance(k, basestring): + k = getattr(query.table, k) + if isinstance(k, Field): + v = k.to_value(v) + else: + v = Value(v, unpack=False) + else: + v = QualifiedNames(v) + updates.append(NodeList((ensure_entity(k), SQL('='), v))) + + parts = [stmt, target, SQL('DO UPDATE SET'), CommaNodeList(updates)] + if on_conflict._where: + parts.extend((SQL('WHERE'), QualifiedNames(on_conflict._where))) + + return NodeList(parts) + + def last_insert_id(self, cursor, query_type=None): + return cursor.lastrowid + + def rows_affected(self, cursor): + return cursor.rowcount + + def default_values_insert(self, ctx): + return ctx.literal('DEFAULT VALUES') + + def session_start(self): + with self._lock: + return self.transaction().__enter__() + + def session_commit(self): + with self._lock: + try: + txn = self.pop_transaction() + except IndexError: + return False + txn.commit(begin=self.in_transaction()) + return True + + def session_rollback(self): + with self._lock: + try: + txn = self.pop_transaction() + except IndexError: + return False + txn.rollback(begin=self.in_transaction()) + return True + + def in_transaction(self): + return bool(self._state.transactions) + + def push_transaction(self, transaction): + self._state.transactions.append(transaction) + + def pop_transaction(self): + return self._state.transactions.pop() + + def transaction_depth(self): + return len(self._state.transactions) + + def top_transaction(self): + if self._state.transactions: + return self._state.transactions[-1] + + def atomic(self, *args, **kwargs): + return _atomic(self, *args, **kwargs) + + def manual_commit(self): + return _manual(self) + + def transaction(self, *args, **kwargs): + return _transaction(self, *args, **kwargs) + + def savepoint(self): + return _savepoint(self) + + def begin(self): + if self.is_closed(): + self.connect() + + def commit(self): + with __exception_wrapper__: + return self._state.conn.commit() + + def rollback(self): + with __exception_wrapper__: + return 
self._state.conn.rollback() + + def batch_commit(self, it, n): + for group in chunked(it, n): + with self.atomic(): + for obj in group: + yield obj + + def table_exists(self, table_name, schema=None): + return table_name in self.get_tables(schema=schema) + + def get_tables(self, schema=None): + raise NotImplementedError + + def get_indexes(self, table, schema=None): + raise NotImplementedError + + def get_columns(self, table, schema=None): + raise NotImplementedError + + def get_primary_keys(self, table, schema=None): + raise NotImplementedError + + def get_foreign_keys(self, table, schema=None): + raise NotImplementedError + + def sequence_exists(self, seq): + raise NotImplementedError + + def create_tables(self, models, **options): + for model in sort_models(models): + model.create_table(**options) + + def drop_tables(self, models, **kwargs): + for model in reversed(sort_models(models)): + model.drop_table(**kwargs) + + def extract_date(self, date_part, date_field): + raise NotImplementedError + + def truncate_date(self, date_part, date_field): + raise NotImplementedError + + def to_timestamp(self, date_field): + raise NotImplementedError + + def from_timestamp(self, date_field): + raise NotImplementedError + + def random(self): + return fn.random() + + def bind(self, models, bind_refs=True, bind_backrefs=True): + for model in models: + model.bind(self, bind_refs=bind_refs, bind_backrefs=bind_backrefs) + + def bind_ctx(self, models, bind_refs=True, bind_backrefs=True): + return _BoundModelsContext(models, self, bind_refs, bind_backrefs) + + def get_noop_select(self, ctx): + return ctx.sql(Select().columns(SQL('0')).where(SQL('0'))) + + +def __pragma__(name): + def __get__(self): + return self.pragma(name) + def __set__(self, value): + return self.pragma(name, value) + return property(__get__, __set__) + + +class SqliteDatabase(Database): + field_types = { + 'BIGAUTO': FIELD.AUTO, + 'BIGINT': FIELD.INT, + 'BOOL': FIELD.INT, + 'DOUBLE': FIELD.FLOAT, + 'SMALLINT': 
FIELD.INT, + 'UUID': FIELD.TEXT} + operations = { + 'LIKE': 'GLOB', + 'ILIKE': 'LIKE'} + index_schema_prefix = True + limit_max = -1 + server_version = __sqlite_version__ + truncate_table = False + + def __init__(self, database, *args, **kwargs): + self._pragmas = kwargs.pop('pragmas', ()) + super(SqliteDatabase, self).__init__(database, *args, **kwargs) + self._aggregates = {} + self._collations = {} + self._functions = {} + self._window_functions = {} + self._table_functions = [] + self._extensions = set() + self._attached = {} + self.register_function(_sqlite_date_part, 'date_part', 2) + self.register_function(_sqlite_date_trunc, 'date_trunc', 2) + self.nulls_ordering = self.server_version >= (3, 30, 0) + + def init(self, database, pragmas=None, timeout=5, **kwargs): + if pragmas is not None: + self._pragmas = pragmas + if isinstance(self._pragmas, dict): + self._pragmas = list(self._pragmas.items()) + self._timeout = timeout + super(SqliteDatabase, self).init(database, **kwargs) + + def _set_server_version(self, conn): + pass + + def _connect(self): + if sqlite3 is None: + raise ImproperlyConfigured('SQLite driver not installed!') + conn = sqlite3.connect(self.database, timeout=self._timeout, + isolation_level=None, **self.connect_params) + try: + self._add_conn_hooks(conn) + except: + conn.close() + raise + return conn + + def _add_conn_hooks(self, conn): + if self._attached: + self._attach_databases(conn) + if self._pragmas: + self._set_pragmas(conn) + self._load_aggregates(conn) + self._load_collations(conn) + self._load_functions(conn) + if self.server_version >= (3, 25, 0): + self._load_window_functions(conn) + if self._table_functions: + for table_function in self._table_functions: + table_function.register(conn) + if self._extensions: + self._load_extensions(conn) + + def _set_pragmas(self, conn): + cursor = conn.cursor() + for pragma, value in self._pragmas: + cursor.execute('PRAGMA %s = %s;' % (pragma, value)) + cursor.close() + + def 
_attach_databases(self, conn): + cursor = conn.cursor() + for name, db in self._attached.items(): + cursor.execute('ATTACH DATABASE "%s" AS "%s"' % (db, name)) + cursor.close() + + def pragma(self, key, value=SENTINEL, permanent=False, schema=None): + if schema is not None: + key = '"%s".%s' % (schema, key) + sql = 'PRAGMA %s' % key + if value is not SENTINEL: + sql += ' = %s' % (value or 0) + if permanent: + pragmas = dict(self._pragmas or ()) + pragmas[key] = value + self._pragmas = list(pragmas.items()) + elif permanent: + raise ValueError('Cannot specify a permanent pragma without value') + row = self.execute_sql(sql).fetchone() + if row: + return row[0] + + cache_size = __pragma__('cache_size') + foreign_keys = __pragma__('foreign_keys') + journal_mode = __pragma__('journal_mode') + journal_size_limit = __pragma__('journal_size_limit') + mmap_size = __pragma__('mmap_size') + page_size = __pragma__('page_size') + read_uncommitted = __pragma__('read_uncommitted') + synchronous = __pragma__('synchronous') + wal_autocheckpoint = __pragma__('wal_autocheckpoint') + + @property + def timeout(self): + return self._timeout + + @timeout.setter + def timeout(self, seconds): + if self._timeout == seconds: + return + + self._timeout = seconds + if not self.is_closed(): + # PySQLite multiplies user timeout by 1000, but the unit of the + # timeout PRAGMA is actually milliseconds. 
+ self.execute_sql('PRAGMA busy_timeout=%d;' % (seconds * 1000)) + + def _load_aggregates(self, conn): + for name, (klass, num_params) in self._aggregates.items(): + conn.create_aggregate(name, num_params, klass) + + def _load_collations(self, conn): + for name, fn in self._collations.items(): + conn.create_collation(name, fn) + + def _load_functions(self, conn): + for name, (fn, num_params) in self._functions.items(): + conn.create_function(name, num_params, fn) + + def _load_window_functions(self, conn): + for name, (klass, num_params) in self._window_functions.items(): + conn.create_window_function(name, num_params, klass) + + def register_aggregate(self, klass, name=None, num_params=-1): + self._aggregates[name or klass.__name__.lower()] = (klass, num_params) + if not self.is_closed(): + self._load_aggregates(self.connection()) + + def aggregate(self, name=None, num_params=-1): + def decorator(klass): + self.register_aggregate(klass, name, num_params) + return klass + return decorator + + def register_collation(self, fn, name=None): + name = name or fn.__name__ + def _collation(*args): + expressions = args + (SQL('collate %s' % name),) + return NodeList(expressions) + fn.collation = _collation + self._collations[name] = fn + if not self.is_closed(): + self._load_collations(self.connection()) + + def collation(self, name=None): + def decorator(fn): + self.register_collation(fn, name) + return fn + return decorator + + def register_function(self, fn, name=None, num_params=-1): + self._functions[name or fn.__name__] = (fn, num_params) + if not self.is_closed(): + self._load_functions(self.connection()) + + def func(self, name=None, num_params=-1): + def decorator(fn): + self.register_function(fn, name, num_params) + return fn + return decorator + + def register_window_function(self, klass, name=None, num_params=-1): + name = name or klass.__name__.lower() + self._window_functions[name] = (klass, num_params) + if not self.is_closed(): + 
self._load_window_functions(self.connection()) + + def window_function(self, name=None, num_params=-1): + def decorator(klass): + self.register_window_function(klass, name, num_params) + return klass + return decorator + + def register_table_function(self, klass, name=None): + if name is not None: + klass.name = name + self._table_functions.append(klass) + if not self.is_closed(): + klass.register(self.connection()) + + def table_function(self, name=None): + def decorator(klass): + self.register_table_function(klass, name) + return klass + return decorator + + def unregister_aggregate(self, name): + del(self._aggregates[name]) + + def unregister_collation(self, name): + del(self._collations[name]) + + def unregister_function(self, name): + del(self._functions[name]) + + def unregister_window_function(self, name): + del(self._window_functions[name]) + + def unregister_table_function(self, name): + for idx, klass in enumerate(self._table_functions): + if klass.name == name: + break + else: + return False + self._table_functions.pop(idx) + return True + + def _load_extensions(self, conn): + conn.enable_load_extension(True) + for extension in self._extensions: + conn.load_extension(extension) + + def load_extension(self, extension): + self._extensions.add(extension) + if not self.is_closed(): + conn = self.connection() + conn.enable_load_extension(True) + conn.load_extension(extension) + + def unload_extension(self, extension): + self._extensions.remove(extension) + + def attach(self, filename, name): + if name in self._attached: + if self._attached[name] == filename: + return False + raise OperationalError('schema "%s" already attached.' 
% name) + + self._attached[name] = filename + if not self.is_closed(): + self.execute_sql('ATTACH DATABASE "%s" AS "%s"' % (filename, name)) + return True + + def detach(self, name): + if name not in self._attached: + return False + + del self._attached[name] + if not self.is_closed(): + self.execute_sql('DETACH DATABASE "%s"' % name) + return True + + def begin(self, lock_type=None): + statement = 'BEGIN %s' % lock_type if lock_type else 'BEGIN' + self.execute_sql(statement, commit=False) + + def get_tables(self, schema=None): + schema = schema or 'main' + cursor = self.execute_sql('SELECT name FROM "%s".sqlite_master WHERE ' + 'type=? ORDER BY name' % schema, ('table',)) + return [row for row, in cursor.fetchall()] + + def get_views(self, schema=None): + sql = ('SELECT name, sql FROM "%s".sqlite_master WHERE type=? ' + 'ORDER BY name') % (schema or 'main') + return [ViewMetadata(*row) for row in self.execute_sql(sql, ('view',))] + + def get_indexes(self, table, schema=None): + schema = schema or 'main' + query = ('SELECT name, sql FROM "%s".sqlite_master ' + 'WHERE tbl_name = ? AND type = ? ORDER BY name') % schema + cursor = self.execute_sql(query, (table, 'index')) + index_to_sql = dict(cursor.fetchall()) + + # Determine which indexes have a unique constraint. + unique_indexes = set() + cursor = self.execute_sql('PRAGMA "%s".index_list("%s")' % + (schema, table)) + for row in cursor.fetchall(): + name = row[1] + is_unique = int(row[2]) == 1 + if is_unique: + unique_indexes.add(name) + + # Retrieve the indexed columns. 
+ index_columns = {} + for index_name in sorted(index_to_sql): + cursor = self.execute_sql('PRAGMA "%s".index_info("%s")' % + (schema, index_name)) + index_columns[index_name] = [row[2] for row in cursor.fetchall()] + + return [ + IndexMetadata( + name, + index_to_sql[name], + index_columns[name], + name in unique_indexes, + table) + for name in sorted(index_to_sql)] + + def get_columns(self, table, schema=None): + cursor = self.execute_sql('PRAGMA "%s".table_info("%s")' % + (schema or 'main', table)) + return [ColumnMetadata(r[1], r[2], not r[3], bool(r[5]), table, r[4]) + for r in cursor.fetchall()] + + def get_primary_keys(self, table, schema=None): + cursor = self.execute_sql('PRAGMA "%s".table_info("%s")' % + (schema or 'main', table)) + return [row[1] for row in filter(lambda r: r[-1], cursor.fetchall())] + + def get_foreign_keys(self, table, schema=None): + cursor = self.execute_sql('PRAGMA "%s".foreign_key_list("%s")' % + (schema or 'main', table)) + return [ForeignKeyMetadata(row[3], row[2], row[4], table) + for row in cursor.fetchall()] + + def get_binary_type(self): + return sqlite3.Binary + + def conflict_statement(self, on_conflict, query): + action = on_conflict._action.lower() if on_conflict._action else '' + if action and action not in ('nothing', 'update'): + return SQL('INSERT OR %s' % on_conflict._action.upper()) + + def conflict_update(self, oc, query): + # Sqlite prior to 3.24.0 does not support Postgres-style upsert. 
+ if self.server_version < (3, 24, 0) and \ + any((oc._preserve, oc._update, oc._where, oc._conflict_target, + oc._conflict_constraint)): + raise ValueError('SQLite does not support specifying which values ' + 'to preserve or update.') + + action = oc._action.lower() if oc._action else '' + if action and action not in ('nothing', 'update', ''): + return + + if action == 'nothing': + return SQL('ON CONFLICT DO NOTHING') + elif not oc._update and not oc._preserve: + raise ValueError('If you are not performing any updates (or ' + 'preserving any INSERTed values), then the ' + 'conflict resolution action should be set to ' + '"NOTHING".') + elif oc._conflict_constraint: + raise ValueError('SQLite does not support specifying named ' + 'constraints for conflict resolution.') + elif not oc._conflict_target: + raise ValueError('SQLite requires that a conflict target be ' + 'specified when doing an upsert.') + + return self._build_on_conflict_update(oc, query) + + def extract_date(self, date_part, date_field): + return fn.date_part(date_part, date_field, python_value=int) + + def truncate_date(self, date_part, date_field): + return fn.date_trunc(date_part, date_field, + python_value=simple_date_time) + + def to_timestamp(self, date_field): + return fn.strftime('%s', date_field).cast('integer') + + def from_timestamp(self, date_field): + return fn.datetime(date_field, 'unixepoch') + + +class PostgresqlDatabase(Database): + field_types = { + 'AUTO': 'SERIAL', + 'BIGAUTO': 'BIGSERIAL', + 'BLOB': 'BYTEA', + 'BOOL': 'BOOLEAN', + 'DATETIME': 'TIMESTAMP', + 'DECIMAL': 'NUMERIC', + 'DOUBLE': 'DOUBLE PRECISION', + 'UUID': 'UUID', + 'UUIDB': 'BYTEA'} + operations = {'REGEXP': '~', 'IREGEXP': '~*'} + param = '%s' + + commit_select = True + compound_select_parentheses = CSQ_PARENTHESES_ALWAYS + for_update = True + nulls_ordering = True + returning_clause = True + safe_create_index = False + sequences = True + + def init(self, database, register_unicode=True, encoding=None, + 
isolation_level=None, **kwargs): + self._register_unicode = register_unicode + self._encoding = encoding + self._isolation_level = isolation_level + super(PostgresqlDatabase, self).init(database, **kwargs) + + def _connect(self): + if psycopg2 is None: + raise ImproperlyConfigured('Postgres driver not installed!') + + # Handle connection-strings nicely, since psycopg2 will accept them, + # and they may be easier when lots of parameters are specified. + params = self.connect_params.copy() + if self.database.startswith('postgresql://'): + params.setdefault('dsn', self.database) + else: + params.setdefault('dbname', self.database) + + conn = psycopg2.connect(**params) + if self._register_unicode: + pg_extensions.register_type(pg_extensions.UNICODE, conn) + pg_extensions.register_type(pg_extensions.UNICODEARRAY, conn) + if self._encoding: + conn.set_client_encoding(self._encoding) + if self._isolation_level: + conn.set_isolation_level(self._isolation_level) + return conn + + def _set_server_version(self, conn): + self.server_version = conn.server_version + if self.server_version >= 90600: + self.safe_create_index = True + + def is_connection_usable(self): + if self._state.closed: + return False + + # Returns True if we are idle, running a command, or in an active + # connection. If the connection is in an error state or the connection + # is otherwise unusable, return False. 
+ txn_status = self._state.conn.get_transaction_status() + return txn_status < pg_extensions.TRANSACTION_STATUS_INERROR + + def last_insert_id(self, cursor, query_type=None): + try: + return cursor if query_type != Insert.SIMPLE else cursor[0][0] + except (IndexError, KeyError, TypeError): + pass + + def get_tables(self, schema=None): + query = ('SELECT tablename FROM pg_catalog.pg_tables ' + 'WHERE schemaname = %s ORDER BY tablename') + cursor = self.execute_sql(query, (schema or 'public',)) + return [table for table, in cursor.fetchall()] + + def get_views(self, schema=None): + query = ('SELECT viewname, definition FROM pg_catalog.pg_views ' + 'WHERE schemaname = %s ORDER BY viewname') + cursor = self.execute_sql(query, (schema or 'public',)) + return [ViewMetadata(view_name, sql.strip(' \t;')) + for (view_name, sql) in cursor.fetchall()] + + def get_indexes(self, table, schema=None): + query = """ + SELECT + i.relname, idxs.indexdef, idx.indisunique, + array_to_string(ARRAY( + SELECT pg_get_indexdef(idx.indexrelid, k + 1, TRUE) + FROM generate_subscripts(idx.indkey, 1) AS k + ORDER BY k), ',') + FROM pg_catalog.pg_class AS t + INNER JOIN pg_catalog.pg_index AS idx ON t.oid = idx.indrelid + INNER JOIN pg_catalog.pg_class AS i ON idx.indexrelid = i.oid + INNER JOIN pg_catalog.pg_indexes AS idxs ON + (idxs.tablename = t.relname AND idxs.indexname = i.relname) + WHERE t.relname = %s AND t.relkind = %s AND idxs.schemaname = %s + ORDER BY idx.indisunique DESC, i.relname;""" + cursor = self.execute_sql(query, (table, 'r', schema or 'public')) + return [IndexMetadata(name, sql.rstrip(' ;'), columns.split(','), + is_unique, table) + for name, sql, is_unique, columns in cursor.fetchall()] + + def get_columns(self, table, schema=None): + query = """ + SELECT column_name, is_nullable, data_type, column_default + FROM information_schema.columns + WHERE table_name = %s AND table_schema = %s + ORDER BY ordinal_position""" + cursor = self.execute_sql(query, (table, schema or 
'public')) + pks = set(self.get_primary_keys(table, schema)) + return [ColumnMetadata(name, dt, null == 'YES', name in pks, table, df) + for name, null, dt, df in cursor.fetchall()] + + def get_primary_keys(self, table, schema=None): + query = """ + SELECT kc.column_name + FROM information_schema.table_constraints AS tc + INNER JOIN information_schema.key_column_usage AS kc ON ( + tc.table_name = kc.table_name AND + tc.table_schema = kc.table_schema AND + tc.constraint_name = kc.constraint_name) + WHERE + tc.constraint_type = %s AND + tc.table_name = %s AND + tc.table_schema = %s""" + ctype = 'PRIMARY KEY' + cursor = self.execute_sql(query, (ctype, table, schema or 'public')) + return [pk for pk, in cursor.fetchall()] + + def get_foreign_keys(self, table, schema=None): + sql = """ + SELECT DISTINCT + kcu.column_name, ccu.table_name, ccu.column_name + FROM information_schema.table_constraints AS tc + JOIN information_schema.key_column_usage AS kcu + ON (tc.constraint_name = kcu.constraint_name AND + tc.constraint_schema = kcu.constraint_schema AND + tc.table_name = kcu.table_name AND + tc.table_schema = kcu.table_schema) + JOIN information_schema.constraint_column_usage AS ccu + ON (ccu.constraint_name = tc.constraint_name AND + ccu.constraint_schema = tc.constraint_schema) + WHERE + tc.constraint_type = 'FOREIGN KEY' AND + tc.table_name = %s AND + tc.table_schema = %s""" + cursor = self.execute_sql(sql, (table, schema or 'public')) + return [ForeignKeyMetadata(row[0], row[1], row[2], table) + for row in cursor.fetchall()] + + def sequence_exists(self, sequence): + res = self.execute_sql(""" + SELECT COUNT(*) FROM pg_class, pg_namespace + WHERE relkind='S' + AND pg_class.relnamespace = pg_namespace.oid + AND relname=%s""", (sequence,)) + return bool(res.fetchone()[0]) + + def get_binary_type(self): + return psycopg2.Binary + + def conflict_statement(self, on_conflict, query): + return + + def conflict_update(self, oc, query): + action = oc._action.lower() if 
oc._action else '' + if action in ('ignore', 'nothing'): + parts = [SQL('ON CONFLICT')] + if oc._conflict_target: + parts.append(EnclosedNodeList([ + Entity(col) if isinstance(col, basestring) else col + for col in oc._conflict_target])) + parts.append(SQL('DO NOTHING')) + return NodeList(parts) + elif action and action != 'update': + raise ValueError('The only supported actions for conflict ' + 'resolution with Postgresql are "ignore" or ' + '"update".') + elif not oc._update and not oc._preserve: + raise ValueError('If you are not performing any updates (or ' + 'preserving any INSERTed values), then the ' + 'conflict resolution action should be set to ' + '"IGNORE".') + elif not (oc._conflict_target or oc._conflict_constraint): + raise ValueError('Postgres requires that a conflict target be ' + 'specified when doing an upsert.') + + return self._build_on_conflict_update(oc, query) + + def extract_date(self, date_part, date_field): + return fn.EXTRACT(NodeList((date_part, SQL('FROM'), date_field))) + + def truncate_date(self, date_part, date_field): + return fn.DATE_TRUNC(date_part, date_field) + + def to_timestamp(self, date_field): + return self.extract_date('EPOCH', date_field) + + def from_timestamp(self, date_field): + # Ironically, here, Postgres means "to the Postgresql timestamp type". 
+ return fn.to_timestamp(date_field) + + def get_noop_select(self, ctx): + return ctx.sql(Select().columns(SQL('0')).where(SQL('false'))) + + def set_time_zone(self, timezone): + self.execute_sql('set time zone "%s";' % timezone) + + +class MySQLDatabase(Database): + field_types = { + 'AUTO': 'INTEGER AUTO_INCREMENT', + 'BIGAUTO': 'BIGINT AUTO_INCREMENT', + 'BOOL': 'BOOL', + 'DECIMAL': 'NUMERIC', + 'DOUBLE': 'DOUBLE PRECISION', + 'FLOAT': 'FLOAT', + 'UUID': 'VARCHAR(40)', + 'UUIDB': 'VARBINARY(16)'} + operations = { + 'LIKE': 'LIKE BINARY', + 'ILIKE': 'LIKE', + 'REGEXP': 'REGEXP BINARY', + 'IREGEXP': 'REGEXP', + 'XOR': 'XOR'} + param = '%s' + quote = '``' + + commit_select = True + compound_select_parentheses = CSQ_PARENTHESES_UNNESTED + for_update = True + index_using_precedes_table = True + limit_max = 2 ** 64 - 1 + safe_create_index = False + safe_drop_index = False + sql_mode = 'PIPES_AS_CONCAT' + + def init(self, database, **kwargs): + params = { + 'charset': 'utf8', + 'sql_mode': self.sql_mode, + 'use_unicode': True} + params.update(kwargs) + if 'password' in params and mysql_passwd: + params['passwd'] = params.pop('password') + super(MySQLDatabase, self).init(database, **params) + + def _connect(self): + if mysql is None: + raise ImproperlyConfigured('MySQL driver not installed!') + conn = mysql.connect(db=self.database, **self.connect_params) + return conn + + def _set_server_version(self, conn): + try: + version_raw = conn.server_version + except AttributeError: + version_raw = conn.get_server_info() + self.server_version = self._extract_server_version(version_raw) + + def _extract_server_version(self, version): + version = version.lower() + if 'maria' in version: + match_obj = re.search(r'(1\d\.\d+\.\d+)', version) + else: + match_obj = re.search(r'(\d\.\d+\.\d+)', version) + if match_obj is not None: + return tuple(int(num) for num in match_obj.groups()[0].split('.')) + + warnings.warn('Unable to determine MySQL version: "%s"' % version) + return (0, 0, 
0) # Unable to determine version! + + def default_values_insert(self, ctx): + return ctx.literal('() VALUES ()') + + def get_tables(self, schema=None): + query = ('SELECT table_name FROM information_schema.tables ' + 'WHERE table_schema = DATABASE() AND table_type != %s ' + 'ORDER BY table_name') + return [table for table, in self.execute_sql(query, ('VIEW',))] + + def get_views(self, schema=None): + query = ('SELECT table_name, view_definition ' + 'FROM information_schema.views ' + 'WHERE table_schema = DATABASE() ORDER BY table_name') + cursor = self.execute_sql(query) + return [ViewMetadata(*row) for row in cursor.fetchall()] + + def get_indexes(self, table, schema=None): + cursor = self.execute_sql('SHOW INDEX FROM `%s`' % table) + unique = set() + indexes = {} + for row in cursor.fetchall(): + if not row[1]: + unique.add(row[2]) + indexes.setdefault(row[2], []) + indexes[row[2]].append(row[4]) + return [IndexMetadata(name, None, indexes[name], name in unique, table) + for name in indexes] + + def get_columns(self, table, schema=None): + sql = """ + SELECT column_name, is_nullable, data_type, column_default + FROM information_schema.columns + WHERE table_name = %s AND table_schema = DATABASE()""" + cursor = self.execute_sql(sql, (table,)) + pks = set(self.get_primary_keys(table)) + return [ColumnMetadata(name, dt, null == 'YES', name in pks, table, df) + for name, null, dt, df in cursor.fetchall()] + + def get_primary_keys(self, table, schema=None): + cursor = self.execute_sql('SHOW INDEX FROM `%s`' % table) + return [row[4] for row in + filter(lambda row: row[2] == 'PRIMARY', cursor.fetchall())] + + def get_foreign_keys(self, table, schema=None): + query = """ + SELECT column_name, referenced_table_name, referenced_column_name + FROM information_schema.key_column_usage + WHERE table_name = %s + AND table_schema = DATABASE() + AND referenced_table_name IS NOT NULL + AND referenced_column_name IS NOT NULL""" + cursor = self.execute_sql(query, (table,)) + return 
[ + ForeignKeyMetadata(column, dest_table, dest_column, table) + for column, dest_table, dest_column in cursor.fetchall()] + + def get_binary_type(self): + return mysql.Binary + + def conflict_statement(self, on_conflict, query): + if not on_conflict._action: return + + action = on_conflict._action.lower() + if action == 'replace': + return SQL('REPLACE') + elif action == 'ignore': + return SQL('INSERT IGNORE') + elif action != 'update': + raise ValueError('Un-supported action for conflict resolution. ' + 'MySQL supports REPLACE, IGNORE and UPDATE.') + + def conflict_update(self, on_conflict, query): + if on_conflict._where or on_conflict._conflict_target or \ + on_conflict._conflict_constraint: + raise ValueError('MySQL does not support the specification of ' + 'where clauses or conflict targets for conflict ' + 'resolution.') + + updates = [] + if on_conflict._preserve: + # Here we need to determine which function to use, which varies + # depending on the MySQL server version. MySQL and MariaDB prior to + # 10.3.3 use "VALUES", while MariaDB 10.3.3+ use "VALUE". + version = self.server_version or (0,) + if version[0] == 10 and version >= (10, 3, 3): + VALUE_FN = fn.VALUE + else: + VALUE_FN = fn.VALUES + + for column in on_conflict._preserve: + entity = ensure_entity(column) + expression = NodeList(( + ensure_entity(column), + SQL('='), + VALUE_FN(entity))) + updates.append(expression) + + if on_conflict._update: + for k, v in on_conflict._update.items(): + if not isinstance(v, Node): + # Attempt to resolve string field-names to their respective + # field object, to apply data-type conversions. 
+ if isinstance(k, basestring): + k = getattr(query.table, k) + if isinstance(k, Field): + v = k.to_value(v) + else: + v = Value(v, unpack=False) + updates.append(NodeList((ensure_entity(k), SQL('='), v))) + + if updates: + return NodeList((SQL('ON DUPLICATE KEY UPDATE'), + CommaNodeList(updates))) + + def extract_date(self, date_part, date_field): + return fn.EXTRACT(NodeList((SQL(date_part), SQL('FROM'), date_field))) + + def truncate_date(self, date_part, date_field): + return fn.DATE_FORMAT(date_field, __mysql_date_trunc__[date_part], + python_value=simple_date_time) + + def to_timestamp(self, date_field): + return fn.UNIX_TIMESTAMP(date_field) + + def from_timestamp(self, date_field): + return fn.FROM_UNIXTIME(date_field) + + def random(self): + return fn.rand() + + def get_noop_select(self, ctx): + return ctx.literal('DO 0') + + +# TRANSACTION CONTROL. + + +class _manual(_callable_context_manager): + def __init__(self, db): + self.db = db + + def __enter__(self): + top = self.db.top_transaction() + if top is not None and not isinstance(top, _manual): + raise ValueError('Cannot enter manual commit block while a ' + 'transaction is active.') + self.db.push_transaction(self) + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.db.pop_transaction() is not self: + raise ValueError('Transaction stack corrupted while exiting ' + 'manual commit block.') + + +class _atomic(_callable_context_manager): + def __init__(self, db, *args, **kwargs): + self.db = db + self._transaction_args = (args, kwargs) + + def __enter__(self): + if self.db.transaction_depth() == 0: + args, kwargs = self._transaction_args + self._helper = self.db.transaction(*args, **kwargs) + elif isinstance(self.db.top_transaction(), _manual): + raise ValueError('Cannot enter atomic commit block while in ' + 'manual commit mode.') + else: + self._helper = self.db.savepoint() + return self._helper.__enter__() + + def __exit__(self, exc_type, exc_val, exc_tb): + return 
self._helper.__exit__(exc_type, exc_val, exc_tb) + + +class _transaction(_callable_context_manager): + def __init__(self, db, *args, **kwargs): + self.db = db + self._begin_args = (args, kwargs) + + def _begin(self): + args, kwargs = self._begin_args + self.db.begin(*args, **kwargs) + + def commit(self, begin=True): + self.db.commit() + if begin: + self._begin() + + def rollback(self, begin=True): + self.db.rollback() + if begin: + self._begin() + + def __enter__(self): + if self.db.transaction_depth() == 0: + self._begin() + self.db.push_transaction(self) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + try: + if exc_type: + self.rollback(False) + elif self.db.transaction_depth() == 1: + try: + self.commit(False) + except: + self.rollback(False) + raise + finally: + self.db.pop_transaction() + + +class _savepoint(_callable_context_manager): + def __init__(self, db, sid=None): + self.db = db + self.sid = sid or 's' + uuid.uuid4().hex + self.quoted_sid = self.sid.join(self.db.quote) + + def _begin(self): + self.db.execute_sql('SAVEPOINT %s;' % self.quoted_sid) + + def commit(self, begin=True): + self.db.execute_sql('RELEASE SAVEPOINT %s;' % self.quoted_sid) + if begin: self._begin() + + def rollback(self): + self.db.execute_sql('ROLLBACK TO SAVEPOINT %s;' % self.quoted_sid) + + def __enter__(self): + self._begin() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type: + self.rollback() + else: + try: + self.commit(begin=False) + except: + self.rollback() + raise + + +# CURSOR REPRESENTATIONS. 
class CursorWrapper(object):
    """Lazily materializes rows from a DB-API cursor, caching as it goes."""

    def __init__(self, cursor):
        self.cursor = cursor
        self.count = 0
        self.index = 0
        self.initialized = False
        self.populated = False
        self.row_cache = []

    def __iter__(self):
        # Fully populated: serve straight from the cache; otherwise stream.
        return iter(self.row_cache) if self.populated else ResultIterator(self)

    def __getitem__(self, item):
        if isinstance(item, slice):
            stop = item.stop
            if stop is None or stop < 0:
                # Open-ended / negative slices need the full result set.
                self.fill_cache()
            else:
                self.fill_cache(stop)
            return self.row_cache[item]
        if isinstance(item, int):
            # Negative indexes pass 0, which fill_cache treats as "fetch all".
            self.fill_cache(item if item > 0 else 0)
            return self.row_cache[item]
        raise ValueError('CursorWrapper only supports integer and slice '
                         'indexes.')

    def __len__(self):
        self.fill_cache()
        return self.count

    def initialize(self):
        # Hook for subclasses; invoked once before the first row is processed.
        pass

    def iterate(self, cache=True):
        row = self.cursor.fetchone()
        if row is None:
            # Exhausted: remember that and release the cursor.
            self.populated = True
            self.cursor.close()
            raise StopIteration
        if not self.initialized:
            self.initialize()  # Lazy initialization.
            self.initialized = True
        self.count += 1
        result = self.process_row(row)
        if cache:
            self.row_cache.append(result)
        return result

    def process_row(self, row):
        # Identity by default; subclasses convert rows to dict/tuple/object.
        return row

    def iterator(self):
        """Efficient one-pass iteration over the result set."""
        while True:
            try:
                yield self.iterate(False)
            except StopIteration:
                return

    def fill_cache(self, n=0):
        # n == 0 (the default) means "fetch everything".
        n = n or float('Inf')
        if n < 0:
            raise ValueError('Negative values are not supported.')

        it = ResultIterator(self)
        it.index = self.count
        # Pull rows until we have enough or the cursor runs dry.
        while not self.populated and (n > self.count):
            try:
                it.next()
            except StopIteration:
                break


class DictCursorWrapper(CursorWrapper):
    """Yields each row as a dict keyed by unqualified column name."""

    def _initialize_columns(self):
        description = self.cursor.description
        # Strip any "table." prefix and stray quoting characters.
        self.columns = [t[0][t[0].rfind('.') + 1:].strip('()"`')
                        for t in description]
        self.ncols = len(description)

    initialize = _initialize_columns

    def _row_to_dict(self, row):
        result = {}
        for i in range(self.ncols):
            result.setdefault(self.columns[i], row[i])  # Do not overwrite.
        return result

    process_row = _row_to_dict


class NamedTupleCursorWrapper(CursorWrapper):
    """Yields each row as a namedtuple of (unqualified) column names."""

    def initialize(self):
        description = self.cursor.description
        self.tuple_class = collections.namedtuple('Row', [
            t[0][t[0].rfind('.') + 1:].strip('()"`') for t in description])

    def process_row(self, row):
        return self.tuple_class(*row)


class ObjectCursorWrapper(DictCursorWrapper):
    """Yields each row passed as keyword arguments to a constructor."""

    def __init__(self, cursor, constructor):
        super(ObjectCursorWrapper, self).__init__(cursor)
        self.constructor = constructor

    def process_row(self, row):
        row_dict = self._row_to_dict(row)
        return self.constructor(**row_dict)


class ResultIterator(object):
    """Iterator over a CursorWrapper, reading its cache before the cursor."""

    def __init__(self, cursor_wrapper):
        self.cursor_wrapper = cursor_wrapper
        self.index = 0

    def __iter__(self):
        return self

    def next(self):
        wrapper = self.cursor_wrapper
        if self.index < wrapper.count:
            obj = wrapper.row_cache[self.index]
        elif not wrapper.populated:
            wrapper.iterate()
            obj = wrapper.row_cache[self.index]
        else:
            raise StopIteration
        self.index += 1
        return obj

    __next__ = next

# FIELDS

class FieldAccessor(object):
    """Descriptor storing a field's value in the instance __data__ dict."""

    def __init__(self, model, field, name):
        self.model = model
        self.field = field
        self.name = name

    def __get__(self, instance, instance_type=None):
        # Class-level access returns the field itself.
        if instance is None:
            return self.field
        return instance.__data__.get(self.name)

    def __set__(self, instance, value):
        instance.__data__[self.name] = value
        instance._dirty.add(self.name)


class ForeignKeyAccessor(FieldAccessor):
    """FieldAccessor that resolves FK values to related model instances."""

    def __init__(self, model, field, name):
        super(ForeignKeyAccessor, self).__init__(model, field, name)
        self.rel_model = field.rel_model

    def get_rel_instance(self, instance):
        value = instance.__data__.get(self.name)
        if value is not None or self.name in instance.__rel__:
            if self.name not in instance.__rel__ and self.field.lazy_load:
                # Fetch and memoize the related instance.
                obj = self.rel_model.get(self.field.rel_field == value)
                instance.__rel__[self.name] = obj
            return instance.__rel__.get(self.name, value)
        elif not self.field.null and self.field.lazy_load:
            raise self.rel_model.DoesNotExist
        return value

    def __get__(self, instance, instance_type=None):
        if instance is None:
            return self.field
        return self.get_rel_instance(instance)

    def __set__(self, instance, obj):
        if isinstance(obj, self.rel_model):
            # Store the related PK value and cache the instance itself.
            instance.__data__[self.name] = getattr(obj,
                                                   self.field.rel_field.name)
            instance.__rel__[self.name] = obj
        else:
            prev_value = instance.__data__.get(self.name)
            instance.__data__[self.name] = obj
            # Invalidate the cached instance when the raw FK value changes.
            if (obj != prev_value or obj is None) and \
               self.name in instance.__rel__:
                del instance.__rel__[self.name]
        instance._dirty.add(self.name)
class BackrefAccessor(object):
    """Descriptor exposing the reverse side of a foreign-key as a query."""

    def __init__(self, field):
        self.field = field
        self.model = field.rel_model
        self.rel_model = field.model

    def __get__(self, instance, instance_type=None):
        if instance is None:
            return self
        # SELECT every related row whose FK column points at this instance.
        dest = self.field.rel_field.name
        return (self.rel_model
                .select()
                .where(self.field == getattr(instance, dest)))


class ObjectIdAccessor(object):
    """Gives direct access to the underlying id"""

    def __init__(self, field):
        self.field = field

    def __get__(self, instance, instance_type=None):
        if instance is None:
            return self.field
        value = instance.__data__.get(self.field.name)
        # Pull the object-id from the related object if it is not set.
        if value is None and self.field.name in instance.__rel__:
            rel_obj = instance.__rel__[self.field.name]
            value = getattr(rel_obj, self.field.rel_field.name)
        return value

    def __set__(self, instance, value):
        setattr(instance, self.field.name, value)


class Field(ColumnBase):
    """Base class mapping a model attribute to a database column."""

    _field_counter = 0
    _order = 0
    accessor_class = FieldAccessor
    auto_increment = False
    default_index_type = None
    field_type = 'DEFAULT'
    unpack = True

    def __init__(self, null=False, index=False, unique=False, column_name=None,
                 default=None, primary_key=False, constraints=None,
                 sequence=None, collation=None, unindexed=False, choices=None,
                 help_text=None, verbose_name=None, index_type=None,
                 db_column=None, _hidden=False):
        if db_column is not None:
            __deprecated__('"db_column" has been deprecated in favor of '
                           '"column_name" for Field objects.')
            column_name = db_column

        self.null = null
        self.index = index
        self.unique = unique
        self.column_name = column_name
        self.default = default
        self.primary_key = primary_key
        self.constraints = constraints  # List of column constraints.
        self.sequence = sequence  # Name of sequence, e.g. foo_id_seq.
        self.collation = collation
        self.unindexed = unindexed
        self.choices = choices
        self.help_text = help_text
        self.verbose_name = verbose_name
        self.index_type = index_type or self.default_index_type
        self._hidden = _hidden

        # Used internally for recovering the order in which Fields were
        # defined on the Model class.
        Field._field_counter += 1
        self._order = Field._field_counter
        # Primary keys sort ahead of ordinary fields.
        self._sort_key = (self.primary_key and 1 or 2), self._order

    def __hash__(self):
        return hash(self.name + '.' + self.model.__name__)

    def __repr__(self):
        if hasattr(self, 'model') and getattr(self, 'name', None):
            return '<%s: %s.%s>' % (type(self).__name__,
                                    self.model.__name__,
                                    self.name)
        return '<%s: (unbound)>' % type(self).__name__

    def bind(self, model, name, set_attribute=True):
        """Attach this field to *model* under *name*."""
        self.model = model
        self.name = self.safe_name = name
        self.column_name = self.column_name or name
        if set_attribute:
            setattr(model, name, self.accessor_class(model, self, name))

    @property
    def column(self):
        return Column(self.model._meta.table, self.column_name)

    def adapt(self, value):
        # Coerce a value to this field's Python type; identity by default.
        return value

    def db_value(self, value):
        return value if value is None else self.adapt(value)

    def python_value(self, value):
        return value if value is None else self.adapt(value)

    def to_value(self, value):
        return Value(value, self.db_value, unpack=False)

    def get_sort_key(self, ctx):
        return self._sort_key

    def __sql__(self, ctx):
        return ctx.sql(self.column)

    def get_modifiers(self):
        # Subclasses return e.g. [max_length] for VARCHAR(n).
        pass

    def ddl_datatype(self, ctx):
        if ctx and ctx.state.field_types:
            # Map the generic field-type through the database's overrides.
            column_type = ctx.state.field_types.get(self.field_type,
                                                    self.field_type)
        else:
            column_type = self.field_type

        modifiers = self.get_modifiers()
        if column_type and modifiers:
            modifier_literal = ', '.join([str(m) for m in modifiers])
            return SQL('%s(%s)' % (column_type, modifier_literal))
        return SQL(column_type)

    def ddl(self, ctx):
        """Return the full column definition for CREATE TABLE."""
        accum = [Entity(self.column_name)]
        data_type = self.ddl_datatype(ctx)
        if data_type:
            accum.append(data_type)
        if self.unindexed:
            accum.append(SQL('UNINDEXED'))
        if not self.null:
            accum.append(SQL('NOT NULL'))
        if self.primary_key:
            accum.append(SQL('PRIMARY KEY'))
        if self.sequence:
            accum.append(SQL("DEFAULT NEXTVAL('%s')" % self.sequence))
        if self.constraints:
            accum.extend(self.constraints)
        if self.collation:
            accum.append(SQL('COLLATE %s' % self.collation))
        return NodeList(accum)


class AnyField(Field):
    field_type = 'ANY'
class IntegerField(Field):
    field_type = 'INT'

    def adapt(self, value):
        try:
            return int(value)
        except ValueError:
            # Leave non-numeric strings as-is rather than failing here.
            return value


class BigIntegerField(IntegerField):
    field_type = 'BIGINT'


class SmallIntegerField(IntegerField):
    field_type = 'SMALLINT'


class AutoField(IntegerField):
    """Auto-incrementing integer primary key."""

    auto_increment = True
    field_type = 'AUTO'

    def __init__(self, *args, **kwargs):
        if kwargs.get('primary_key') is False:
            raise ValueError('%s must always be a primary key.' % type(self))
        kwargs['primary_key'] = True
        super(AutoField, self).__init__(*args, **kwargs)


class BigAutoField(AutoField):
    field_type = 'BIGAUTO'


class IdentityField(AutoField):
    """Postgres-style IDENTITY column."""

    field_type = 'INT GENERATED BY DEFAULT AS IDENTITY'

    def __init__(self, generate_always=False, **kwargs):
        if generate_always:
            self.field_type = 'INT GENERATED ALWAYS AS IDENTITY'
        super(IdentityField, self).__init__(**kwargs)


class PrimaryKeyField(AutoField):
    """Deprecated alias of AutoField."""

    def __init__(self, *args, **kwargs):
        __deprecated__('"PrimaryKeyField" has been renamed to "AutoField". '
                       'Please update your code accordingly as this will be '
                       'completely removed in a subsequent release.')
        super(PrimaryKeyField, self).__init__(*args, **kwargs)


class FloatField(Field):
    field_type = 'FLOAT'

    def adapt(self, value):
        try:
            return float(value)
        except ValueError:
            # Leave unconvertible values untouched.
            return value


class DoubleField(FloatField):
    field_type = 'DOUBLE'


class DecimalField(Field):
    field_type = 'DECIMAL'

    def __init__(self, max_digits=10, decimal_places=5, auto_round=False,
                 rounding=None, *args, **kwargs):
        self.max_digits = max_digits
        self.decimal_places = decimal_places
        self.auto_round = auto_round
        self.rounding = rounding or decimal.DefaultContext.rounding
        # Quantization exponent, e.g. Decimal('0.00001') for 5 places.
        self._exp = decimal.Decimal(10) ** (-self.decimal_places)
        super(DecimalField, self).__init__(*args, **kwargs)

    def get_modifiers(self):
        return [self.max_digits, self.decimal_places]

    def db_value(self, value):
        D = decimal.Decimal
        if not value:
            # Falsy non-None values (0, '') normalize to Decimal zero.
            return value if value is None else D(0)
        if self.auto_round:
            decimal_value = D(text_type(value))
            return decimal_value.quantize(self._exp, rounding=self.rounding)
        return value

    def python_value(self, value):
        if value is not None:
            if isinstance(value, decimal.Decimal):
                return value
            return decimal.Decimal(text_type(value))


class _StringField(Field):
    """Shared base for text-valued fields; decodes bytes as UTF-8."""

    def adapt(self, value):
        if isinstance(value, text_type):
            return value
        if isinstance(value, bytes_type):
            return value.decode('utf-8')
        return text_type(value)

    def __add__(self, other):
        return StringExpression(self, OP.CONCAT, other)

    def __radd__(self, other):
        return StringExpression(other, OP.CONCAT, self)


class CharField(_StringField):
    field_type = 'VARCHAR'

    def __init__(self, max_length=255, *args, **kwargs):
        self.max_length = max_length
        super(CharField, self).__init__(*args, **kwargs)

    def get_modifiers(self):
        return [self.max_length] if self.max_length else None


class FixedCharField(CharField):
    field_type = 'CHAR'

    def python_value(self, value):
        # CHAR columns are space-padded by the database; strip the padding.
        value = super(FixedCharField, self).python_value(value)
        if value:
            value = value.strip()
        return value
class TextField(_StringField):
    field_type = 'TEXT'


class BlobField(Field):
    """Binary column; converts values with the database's binary type."""

    field_type = 'BLOB'

    def _db_hook(self, database):
        # Pick the driver-appropriate binary constructor (or bytearray).
        if database is None:
            self._constructor = bytearray
        else:
            self._constructor = database.get_binary_type()

    def bind(self, model, name, set_attribute=True):
        self._constructor = bytearray
        if model._meta.database:
            if isinstance(model._meta.database, Proxy):
                model._meta.database.attach_callback(self._db_hook)
            else:
                self._db_hook(model._meta.database)

        # Attach a hook to the model metadata; in the event the database is
        # changed or set at run-time, we will be sure to apply our callback
        # and use the proper data-type for our database driver.
        model._meta._db_hooks.append(self._db_hook)
        return super(BlobField, self).bind(model, name, set_attribute)

    def db_value(self, value):
        if isinstance(value, text_type):
            value = value.encode('raw_unicode_escape')
        if isinstance(value, bytes_type):
            return self._constructor(value)
        return value


class BitField(BitwiseMixin, BigIntegerField):
    """Integer field whose individual bits are exposed via flag()."""

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('default', 0)
        super(BitField, self).__init__(*args, **kwargs)
        self.__current_flag = 1

    def flag(self, value=None):
        """Return a boolean descriptor bound to the given (or next) bit."""
        if value is None:
            value = self.__current_flag
            self.__current_flag <<= 1
        else:
            # Explicit value: subsequent implicit flags continue above it.
            self.__current_flag = value << 1

        class FlagDescriptor(ColumnBase):
            def __init__(self, field, value):
                self._field = field
                self._value = value
                super(FlagDescriptor, self).__init__()
            def clear(self):
                return self._field.bin_and(~self._value)
            def set(self):
                return self._field.bin_or(self._value)
            def __get__(self, instance, instance_type=None):
                if instance is None:
                    return self
                value = getattr(instance, self._field.name) or 0
                return (value & self._value) != 0
            def __set__(self, instance, is_set):
                if is_set not in (True, False):
                    raise ValueError('Value must be either True or False')
                value = getattr(instance, self._field.name) or 0
                if is_set:
                    value |= self._value
                else:
                    value &= ~self._value
                setattr(instance, self._field.name, value)
            def __sql__(self, ctx):
                return ctx.sql(self._field.bin_and(self._value) != 0)
        return FlagDescriptor(self, value)


class BigBitFieldData(object):
    """Mutable view over a bytearray, addressed bit-by-bit."""

    def __init__(self, instance, name):
        self.instance = instance
        self.name = name
        value = self.instance.__data__.get(self.name)
        if not value:
            value = bytearray()
        elif not isinstance(value, bytearray):
            value = bytearray(value)
        # The buffer is shared with the instance's stored data.
        self._buffer = self.instance.__data__[self.name] = value

    def _ensure_length(self, idx):
        # Grow the buffer with zero bytes so bit *idx* is addressable.
        byte_num, byte_offset = divmod(idx, 8)
        cur_size = len(self._buffer)
        if cur_size <= byte_num:
            self._buffer.extend(b'\x00' * ((byte_num + 1) - cur_size))
        return byte_num, byte_offset

    def set_bit(self, idx):
        byte_num, byte_offset = self._ensure_length(idx)
        self._buffer[byte_num] |= (1 << byte_offset)

    def clear_bit(self, idx):
        byte_num, byte_offset = self._ensure_length(idx)
        self._buffer[byte_num] &= ~(1 << byte_offset)

    def toggle_bit(self, idx):
        byte_num, byte_offset = self._ensure_length(idx)
        self._buffer[byte_num] ^= (1 << byte_offset)
        return bool(self._buffer[byte_num] & (1 << byte_offset))

    def is_set(self, idx):
        byte_num, byte_offset = self._ensure_length(idx)
        return bool(self._buffer[byte_num] & (1 << byte_offset))

    def __repr__(self):
        return repr(self._buffer)


class BigBitFieldAccessor(FieldAccessor):
    def __get__(self, instance, instance_type=None):
        if instance is None:
            return self.field
        return BigBitFieldData(instance, self.name)
    def __set__(self, instance, value):
        # Normalize the many accepted input types down to bytes.
        if isinstance(value, memoryview):
            value = value.tobytes()
        elif isinstance(value, buffer_type):
            value = bytes(value)
        elif isinstance(value, bytearray):
            value = bytes_type(value)
        elif isinstance(value, BigBitFieldData):
            value = bytes_type(value._buffer)
        elif isinstance(value, text_type):
            value = value.encode('utf-8')
        elif not isinstance(value, bytes_type):
            raise ValueError('Value must be either a bytes, memoryview or '
                             'BigBitFieldData instance.')
        super(BigBitFieldAccessor, self).__set__(instance, value)


class BigBitField(BlobField):
    accessor_class = BigBitFieldAccessor

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('default', bytes_type)
        super(BigBitField, self).__init__(*args, **kwargs)

    def db_value(self, value):
        return bytes_type(value) if value is not None else value


class UUIDField(Field):
    """Stores UUIDs as 32-character hex strings."""

    field_type = 'UUID'

    def db_value(self, value):
        if isinstance(value, basestring) and len(value) == 32:
            # Hex string. No transformation is necessary.
            return value
        elif isinstance(value, bytes) and len(value) == 16:
            # Allow raw binary representation.
            value = uuid.UUID(bytes=value)
        if isinstance(value, uuid.UUID):
            return value.hex
        try:
            return uuid.UUID(value).hex
        except (AttributeError, TypeError, ValueError):
            # FIX: previously a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit. uuid.UUID() raises only these
            # exception types for malformed input; anything else propagates.
            return value

    def python_value(self, value):
        if isinstance(value, uuid.UUID):
            return value
        return uuid.UUID(value) if value is not None else None


class BinaryUUIDField(BlobField):
    """Stores UUIDs in their compact 16-byte binary form."""

    field_type = 'UUIDB'

    def db_value(self, value):
        if isinstance(value, bytes) and len(value) == 16:
            # Raw binary value. No transformation is necessary.
            return self._constructor(value)
        elif isinstance(value, basestring) and len(value) == 32:
            # Allow hex string representation.
            value = uuid.UUID(hex=value)
        if isinstance(value, uuid.UUID):
            return self._constructor(value.bytes)
        elif value is not None:
            raise ValueError('value for binary UUID field must be UUID(), '
                             'a hexadecimal string, or a bytes object.')

    def python_value(self, value):
        if isinstance(value, uuid.UUID):
            return value
        elif isinstance(value, memoryview):
            value = value.tobytes()
        elif value and not isinstance(value, bytes):
            value = bytes(value)
        return uuid.UUID(bytes=value) if value is not None else None
def _date_part(date_part):
    # Build a property getter delegating EXTRACT() to the database.
    def dec(self):
        return self.model._meta.database.extract_date(date_part, self)
    return dec

def format_date_time(value, formats, post_process=None):
    """Try each strptime format in turn; return *value* unchanged on failure."""
    post_process = post_process or (lambda x: x)
    for fmt in formats:
        try:
            return post_process(datetime.datetime.strptime(value, fmt))
        except ValueError:
            pass
    return value

def simple_date_time(value):
    # Best-effort parse of 'YYYY-MM-DD HH:MM:SS'; fall back to the raw value.
    try:
        return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
    except (TypeError, ValueError):
        return value


class _BaseFormattedField(Field):
    """Field base supporting a per-instance list of strptime formats."""

    formats = None

    def __init__(self, formats=None, *args, **kwargs):
        if formats is not None:
            self.formats = formats
        super(_BaseFormattedField, self).__init__(*args, **kwargs)


class DateTimeField(_BaseFormattedField):
    field_type = 'DATETIME'
    formats = [
        '%Y-%m-%d %H:%M:%S.%f',
        '%Y-%m-%d %H:%M:%S',
        '%Y-%m-%d',
    ]

    def adapt(self, value):
        if value and isinstance(value, basestring):
            return format_date_time(value, self.formats)
        return value

    def to_timestamp(self):
        return self.model._meta.database.to_timestamp(self)

    def truncate(self, part):
        return self.model._meta.database.truncate_date(part, self)

    year = property(_date_part('year'))
    month = property(_date_part('month'))
    day = property(_date_part('day'))
    hour = property(_date_part('hour'))
    minute = property(_date_part('minute'))
    second = property(_date_part('second'))


class DateField(_BaseFormattedField):
    field_type = 'DATE'
    formats = [
        '%Y-%m-%d',
        '%Y-%m-%d %H:%M:%S',
        '%Y-%m-%d %H:%M:%S.%f',
    ]

    def adapt(self, value):
        if value and isinstance(value, basestring):
            # Parse, then keep only the date portion.
            return format_date_time(value, self.formats, lambda x: x.date())
        elif value and isinstance(value, datetime.datetime):
            return value.date()
        return value

    def to_timestamp(self):
        return self.model._meta.database.to_timestamp(self)

    def truncate(self, part):
        return self.model._meta.database.truncate_date(part, self)

    year = property(_date_part('year'))
    month = property(_date_part('month'))
    day = property(_date_part('day'))


class TimeField(_BaseFormattedField):
    field_type = 'TIME'
    formats = [
        '%H:%M:%S.%f',
        '%H:%M:%S',
        '%H:%M',
        '%Y-%m-%d %H:%M:%S.%f',
        '%Y-%m-%d %H:%M:%S',
    ]

    def adapt(self, value):
        if value:
            if isinstance(value, basestring):
                # Parse, then keep only the time portion.
                return format_date_time(value, self.formats,
                                        lambda x: x.time())
            elif isinstance(value, datetime.datetime):
                return value.time()
        if value is not None and isinstance(value, datetime.timedelta):
            # Some drivers return TIME columns as timedeltas.
            return (datetime.datetime.min + value).time()
        return value

    hour = property(_date_part('hour'))
    minute = property(_date_part('minute'))
    second = property(_date_part('second'))


def _timestamp_date_part(date_part):
    # As _date_part, but first converts the stored integer back to a
    # database timestamp, undoing any sub-second resolution scaling.
    def dec(self):
        db = self.model._meta.database
        expr = ((self / Value(self.resolution, converter=False))
                if self.resolution > 1 else self)
        return db.extract_date(date_part, db.from_timestamp(expr))
    return dec


class TimestampField(BigIntegerField):
    """Datetime stored as an integer (unix) timestamp.

    Supports second -> microsecond resolution via the ``resolution`` kwarg.
    """

    valid_resolutions = [10**i for i in range(7)]

    def __init__(self, *args, **kwargs):
        self.resolution = kwargs.pop('resolution', None)

        if not self.resolution:
            self.resolution = 1
        elif self.resolution in range(2, 7):
            # Shorthand: resolution=3 means 10**3 ticks per second.
            self.resolution = 10 ** self.resolution
        elif self.resolution not in self.valid_resolutions:
            raise ValueError('TimestampField resolution must be one of: %s' %
                             ', '.join(str(i) for i in self.valid_resolutions))
        self.ticks_to_microsecond = 1000000 // self.resolution

        self.utc = kwargs.pop('utc', False) or False
        dflt = datetime.datetime.utcnow if self.utc else datetime.datetime.now
        kwargs.setdefault('default', dflt)
        super(TimestampField, self).__init__(*args, **kwargs)

    def local_to_utc(self, dt):
        # Convert naive local datetime into naive UTC, e.g.:
        # 2019-03-01T12:00:00 (local=US/Central) -> 2019-03-01T18:00:00.
        # 2019-05-01T12:00:00 (local=US/Central) -> 2019-05-01T17:00:00.
        # 2019-03-01T12:00:00 (local=UTC)        -> 2019-03-01T12:00:00.
        return datetime.datetime(*time.gmtime(time.mktime(dt.timetuple()))[:6])

    def utc_to_local(self, dt):
        # Convert a naive UTC datetime into local time, e.g.:
        # 2019-03-01T18:00:00 (local=US/Central) -> 2019-03-01T12:00:00.
        # 2019-05-01T17:00:00 (local=US/Central) -> 2019-05-01T12:00:00.
        # 2019-03-01T12:00:00 (local=UTC)        -> 2019-03-01T12:00:00.
        ts = calendar.timegm(dt.utctimetuple())
        return datetime.datetime.fromtimestamp(ts)

    def get_timestamp(self, value):
        if self.utc:
            # If utc-mode is on, then we assume all naive datetimes are
            # in UTC.
            return calendar.timegm(value.utctimetuple())
        return time.mktime(value.timetuple())

    def db_value(self, value):
        if value is None:
            return

        if isinstance(value, datetime.datetime):
            pass
        elif isinstance(value, datetime.date):
            value = datetime.datetime(value.year, value.month, value.day)
        else:
            # Already numeric: just apply the resolution scaling.
            return int(round(value * self.resolution))

        timestamp = self.get_timestamp(value)
        if self.resolution > 1:
            timestamp += (value.microsecond * .000001)
            timestamp *= self.resolution
        return int(round(timestamp))

    def python_value(self, value):
        # NOTE: `long` is presumably aliased to int for py3 earlier in this
        # module — confirm against the module preamble.
        if value is not None and isinstance(value, (int, float, long)):
            if self.resolution > 1:
                value, ticks = divmod(value, self.resolution)
                microseconds = int(ticks * self.ticks_to_microsecond)
            else:
                microseconds = 0

            if self.utc:
                value = datetime.datetime.utcfromtimestamp(value)
            else:
                value = datetime.datetime.fromtimestamp(value)

            if microseconds:
                value = value.replace(microsecond=microseconds)

        return value

    def from_timestamp(self):
        expr = ((self / Value(self.resolution, converter=False))
                if self.resolution > 1 else self)
        return self.model._meta.database.from_timestamp(expr)

    year = property(_timestamp_date_part('year'))
    month = property(_timestamp_date_part('month'))
    day = property(_timestamp_date_part('day'))
    hour = property(_timestamp_date_part('hour'))
    minute = property(_timestamp_date_part('minute'))
    second = property(_timestamp_date_part('second'))


class IPField(BigIntegerField):
    """Stores dotted-quad IPv4 addresses as unsigned 32-bit integers."""

    def db_value(self, val):
        if val is not None:
            return struct.unpack('!I', socket.inet_aton(val))[0]

    def python_value(self, val):
        if val is not None:
            return socket.inet_ntoa(struct.pack('!I', val))


class BooleanField(Field):
    field_type = 'BOOL'
    adapt = bool


class BareField(Field):
    """Typeless column (SQLite); optionally takes a custom adapter."""

    def __init__(self, adapt=None, *args, **kwargs):
        super(BareField, self).__init__(*args, **kwargs)
        if adapt is not None:
            self.adapt = adapt

    def ddl_datatype(self, ctx):
        # Bare columns emit no datatype in their DDL.
        return
class ForeignKeyField(Field):
    """Field referencing a column (usually the PK) on another model."""

    accessor_class = ForeignKeyAccessor
    backref_accessor_class = BackrefAccessor

    def __init__(self, model, field=None, backref=None, on_delete=None,
                 on_update=None, deferrable=None, _deferred=None,
                 rel_model=None, to_field=None, object_id_name=None,
                 lazy_load=True, constraint_name=None, related_name=None,
                 *args, **kwargs):
        kwargs.setdefault('index', True)

        super(ForeignKeyField, self).__init__(*args, **kwargs)

        # Map the deprecated keyword spellings onto their replacements.
        if rel_model is not None:
            __deprecated__('"rel_model" has been deprecated in favor of '
                           '"model" for ForeignKeyField objects.')
            model = rel_model
        if to_field is not None:
            __deprecated__('"to_field" has been deprecated in favor of '
                           '"field" for ForeignKeyField objects.')
            field = to_field
        if related_name is not None:
            __deprecated__('"related_name" has been deprecated in favor of '
                           '"backref" for Field objects.')
            backref = related_name

        self._is_self_reference = model == 'self'
        self.rel_model = model
        self.rel_field = field
        self.declared_backref = backref
        self.backref = None
        self.on_delete = on_delete
        self.on_update = on_update
        self.deferrable = deferrable
        self.deferred = _deferred
        self.object_id_name = object_id_name
        self.lazy_load = lazy_load
        self.constraint_name = constraint_name

    @property
    def field_type(self):
        # Mirror the referenced column's type; AUTO columns map to the
        # corresponding plain integer type.
        if not isinstance(self.rel_field, AutoField):
            return self.rel_field.field_type
        elif isinstance(self.rel_field, BigAutoField):
            return BigIntegerField.field_type
        return IntegerField.field_type

    def get_modifiers(self):
        if not isinstance(self.rel_field, AutoField):
            return self.rel_field.get_modifiers()
        return super(ForeignKeyField, self).get_modifiers()

    def adapt(self, value):
        return self.rel_field.adapt(value)

    def db_value(self, value):
        if isinstance(value, self.rel_model):
            value = getattr(value, self.rel_field.name)
        return self.rel_field.db_value(value)

    def python_value(self, value):
        if isinstance(value, self.rel_model):
            return value
        return self.rel_field.python_value(value)

    def bind(self, model, name, set_attribute=True):
        if not self.column_name:
            self.column_name = name if name.endswith('_id') else name + '_id'
        if not self.object_id_name:
            self.object_id_name = self.column_name
            if self.object_id_name == name:
                self.object_id_name += '_id'
        elif self.object_id_name == name:
            raise ValueError('ForeignKeyField "%s"."%s" specifies an '
                             'object_id_name that conflicts with its field '
                             'name.' % (model._meta.name, name))
        if self._is_self_reference:
            self.rel_model = model
        if isinstance(self.rel_field, basestring):
            self.rel_field = getattr(self.rel_model, self.rel_field)
        elif self.rel_field is None:
            self.rel_field = self.rel_model._meta.primary_key

        # Bind field before assigning backref, so field is bound when
        # calling declared_backref() (if callable).
        super(ForeignKeyField, self).bind(model, name, set_attribute)
        self.safe_name = self.object_id_name

        if callable_(self.declared_backref):
            self.backref = self.declared_backref(self)
        else:
            self.backref, self.declared_backref = self.declared_backref, None
        if not self.backref:
            self.backref = '%s_set' % model._meta.name

        if set_attribute:
            setattr(model, self.object_id_name, ObjectIdAccessor(self))
            # '!' / '+' suppress backref creation (substring membership
            # check against the two sentinel characters).
            if self.backref not in '!+':
                setattr(self.rel_model, self.backref,
                        self.backref_accessor_class(self))

    def foreign_key_constraint(self):
        """Build the FOREIGN KEY ... REFERENCES clause for DDL."""
        parts = []
        if self.constraint_name:
            parts.extend((SQL('CONSTRAINT'), Entity(self.constraint_name)))
        parts.extend([
            SQL('FOREIGN KEY'),
            EnclosedNodeList((self,)),
            SQL('REFERENCES'),
            self.rel_model,
            EnclosedNodeList((self.rel_field,))])
        if self.on_delete:
            parts.append(SQL('ON DELETE %s' % self.on_delete))
        if self.on_update:
            parts.append(SQL('ON UPDATE %s' % self.on_update))
        if self.deferrable:
            parts.append(SQL('DEFERRABLE %s' % self.deferrable))
        return NodeList(parts)

    def __getattr__(self, attr):
        if attr.startswith('__'):
            # Prevent recursion error when deep-copying.
            raise AttributeError('Cannot look-up non-existant "__" methods.')
        # Proxy unknown attributes through to the related model's fields.
        if attr in self.rel_model._meta.fields:
            return self.rel_model._meta.fields[attr]
        raise AttributeError('Foreign-key has no attribute %s, nor is it a '
                             'valid field on the related model.' % attr)


class DeferredForeignKey(Field):
    """Placeholder FK resolved once the target model class is declared."""

    _unresolved = set()

    def __init__(self, rel_model_name, **kwargs):
        self.field_kwargs = kwargs
        self.rel_model_name = rel_model_name.lower()
        DeferredForeignKey._unresolved.add(self)
        super(DeferredForeignKey, self).__init__(
            column_name=kwargs.get('column_name'),
            null=kwargs.get('null'),
            primary_key=kwargs.get('primary_key'))

    __hash__ = object.__hash__

    def __deepcopy__(self, memo=None):
        return DeferredForeignKey(self.rel_model_name, **self.field_kwargs)

    def set_model(self, rel_model):
        # Swap this placeholder for a real ForeignKeyField on the model.
        field = ForeignKeyField(rel_model, _deferred=True, **self.field_kwargs)
        if field.primary_key:
            # NOTE: this calls add_field() under-the-hood.
            self.model._meta.set_primary_key(self.name, field)
        else:
            self.model._meta.add_field(self.name, field)

    @staticmethod
    def resolve(model_cls):
        # Resolve in declaration order for deterministic behavior.
        unresolved = sorted(DeferredForeignKey._unresolved,
                            key=operator.attrgetter('_order'))
        for dr in unresolved:
            if dr.rel_model_name == model_cls.__name__.lower():
                dr.set_model(model_cls)
                DeferredForeignKey._unresolved.discard(dr)


class DeferredThroughModel(object):
    """Placeholder for a many-to-many through-model declared later."""

    def __init__(self):
        self._refs = []

    def set_field(self, model, field, name):
        self._refs.append((model, field, name))

    def set_model(self, through_model):
        # Re-bind every ManyToManyField that was waiting on this model.
        for src_model, m2mfield, name in self._refs:
            m2mfield.through_model = through_model
            src_model._meta.add_field(name, m2mfield)


class MetaField(Field):
    column_name = default = model = name = None
    primary_key = False


class ManyToManyFieldAccessor(FieldAccessor):
    """Descriptor returning a ManyToManyQuery over the through-model."""

    def __init__(self, model, field, name):
        super(ManyToManyFieldAccessor, self).__init__(model, field, name)
        self.model = field.model
        self.rel_model = field.rel_model
        self.through_model = field.through_model
        src_fks = self.through_model._meta.model_refs[self.model]
        dest_fks = self.through_model._meta.model_refs[self.rel_model]
        if not src_fks:
            raise ValueError('Cannot find foreign-key to "%s" on "%s" model.' %
                             (self.model, self.through_model))
        elif not dest_fks:
            raise ValueError('Cannot find foreign-key to "%s" on "%s" model.' %
                             (self.rel_model, self.through_model))
        self.src_fk = src_fks[0]
        self.dest_fk = dest_fks[0]

    def __get__(self, instance, instance_type=None, force_query=False):
        if instance is None:
            return self.field

        if not force_query and self.src_fk.backref != '+':
            # If the backref rows were prefetched, answer from memory.
            backref = getattr(instance, self.src_fk.backref)
            if isinstance(backref, list):
                return [getattr(obj, self.dest_fk.name) for obj in backref]

        src_id = getattr(instance, self.src_fk.rel_field.name)
        return (ManyToManyQuery(instance, self, self.rel_model)
                .join(self.through_model)
                .join(self.model)
                .where(self.src_fk == src_id))

    def __set__(self, instance, value):
        # Assignment replaces the full set of related objects.
        query = self.__get__(instance, force_query=True)
        query.add(value, clear_existing=True)
% + (self.rel_model, self.through_model)) + self.src_fk = src_fks[0] + self.dest_fk = dest_fks[0] + + def __get__(self, instance, instance_type=None, force_query=False): + if instance is not None: + if not force_query and self.src_fk.backref != '+': + backref = getattr(instance, self.src_fk.backref) + if isinstance(backref, list): + return [getattr(obj, self.dest_fk.name) for obj in backref] + + src_id = getattr(instance, self.src_fk.rel_field.name) + return (ManyToManyQuery(instance, self, self.rel_model) + .join(self.through_model) + .join(self.model) + .where(self.src_fk == src_id)) + + return self.field + + def __set__(self, instance, value): + query = self.__get__(instance, force_query=True) + query.add(value, clear_existing=True) + + +class ManyToManyField(MetaField): + accessor_class = ManyToManyFieldAccessor + + def __init__(self, model, backref=None, through_model=None, on_delete=None, + on_update=None, _is_backref=False): + if through_model is not None: + if not (isinstance(through_model, DeferredThroughModel) or + is_model(through_model)): + raise TypeError('Unexpected value for through_model. 
Expected ' + 'Model or DeferredThroughModel.') + if not _is_backref and (on_delete is not None or on_update is not None): + raise ValueError('Cannot specify on_delete or on_update when ' + 'through_model is specified.') + self.rel_model = model + self.backref = backref + self._through_model = through_model + self._on_delete = on_delete + self._on_update = on_update + self._is_backref = _is_backref + + def _get_descriptor(self): + return ManyToManyFieldAccessor(self) + + def bind(self, model, name, set_attribute=True): + if isinstance(self._through_model, DeferredThroughModel): + self._through_model.set_field(model, self, name) + return + + super(ManyToManyField, self).bind(model, name, set_attribute) + + if not self._is_backref: + many_to_many_field = ManyToManyField( + self.model, + backref=name, + through_model=self.through_model, + on_delete=self._on_delete, + on_update=self._on_update, + _is_backref=True) + self.backref = self.backref or model._meta.name + 's' + self.rel_model._meta.add_field(self.backref, many_to_many_field) + + def get_models(self): + return [model for _, model in sorted(( + (self._is_backref, self.model), + (not self._is_backref, self.rel_model)))] + + @property + def through_model(self): + if self._through_model is None: + self._through_model = self._create_through_model() + return self._through_model + + @through_model.setter + def through_model(self, value): + self._through_model = value + + def _create_through_model(self): + lhs, rhs = self.get_models() + tables = [model._meta.table_name for model in (lhs, rhs)] + + class Meta: + database = self.model._meta.database + schema = self.model._meta.schema + table_name = '%s_%s_through' % tuple(tables) + indexes = ( + ((lhs._meta.name, rhs._meta.name), + True),) + + params = {'on_delete': self._on_delete, 'on_update': self._on_update} + attrs = { + lhs._meta.name: ForeignKeyField(lhs, **params), + rhs._meta.name: ForeignKeyField(rhs, **params), + 'Meta': Meta} + + klass_name = '%s%sThrough' % 
(lhs.__name__, rhs.__name__) + return type(klass_name, (Model,), attrs) + + def get_through_model(self): + # XXX: Deprecated. Just use the "through_model" property. + return self.through_model + + +class VirtualField(MetaField): + field_class = None + + def __init__(self, field_class=None, *args, **kwargs): + Field = field_class if field_class is not None else self.field_class + self.field_instance = Field() if Field is not None else None + super(VirtualField, self).__init__(*args, **kwargs) + + def db_value(self, value): + if self.field_instance is not None: + return self.field_instance.db_value(value) + return value + + def python_value(self, value): + if self.field_instance is not None: + return self.field_instance.python_value(value) + return value + + def bind(self, model, name, set_attribute=True): + self.model = model + self.column_name = self.name = self.safe_name = name + setattr(model, name, self.accessor_class(model, self, name)) + + +class CompositeKey(MetaField): + sequence = None + + def __init__(self, *field_names): + self.field_names = field_names + self._safe_field_names = None + + @property + def safe_field_names(self): + if self._safe_field_names is None: + if self.model is None: + return self.field_names + + self._safe_field_names = [self.model._meta.fields[f].safe_name + for f in self.field_names] + return self._safe_field_names + + def __get__(self, instance, instance_type=None): + if instance is not None: + return tuple([getattr(instance, f) for f in self.safe_field_names]) + return self + + def __set__(self, instance, value): + if not isinstance(value, (list, tuple)): + raise TypeError('A list or tuple must be used to set the value of ' + 'a composite primary key.') + if len(value) != len(self.field_names): + raise ValueError('The length of the value must equal the number ' + 'of columns of the composite primary key.') + for idx, field_value in enumerate(value): + setattr(instance, self.field_names[idx], field_value) + + def __eq__(self, 
other): + expressions = [(self.model._meta.fields[field] == value) + for field, value in zip(self.field_names, other)] + return reduce(operator.and_, expressions) + + def __ne__(self, other): + return ~(self == other) + + def __hash__(self): + return hash((self.model.__name__, self.field_names)) + + def __sql__(self, ctx): + # If the composite PK is being selected, do not use parens. Elsewhere, + # such as in an expression, we want to use parentheses and treat it as + # a row value. + parens = ctx.scope != SCOPE_SOURCE + return ctx.sql(NodeList([self.model._meta.fields[field] + for field in self.field_names], ', ', parens)) + + def bind(self, model, name, set_attribute=True): + self.model = model + self.column_name = self.name = self.safe_name = name + setattr(model, self.name, self) + + +class _SortedFieldList(object): + __slots__ = ('_keys', '_items') + + def __init__(self): + self._keys = [] + self._items = [] + + def __getitem__(self, i): + return self._items[i] + + def __iter__(self): + return iter(self._items) + + def __contains__(self, item): + k = item._sort_key + i = bisect_left(self._keys, k) + j = bisect_right(self._keys, k) + return item in self._items[i:j] + + def index(self, field): + return self._keys.index(field._sort_key) + + def insert(self, item): + k = item._sort_key + i = bisect_left(self._keys, k) + self._keys.insert(i, k) + self._items.insert(i, item) + + def remove(self, item): + idx = self.index(item) + del self._items[idx] + del self._keys[idx] + + +# MODELS + + +class SchemaManager(object): + def __init__(self, model, database=None, **context_options): + self.model = model + self._database = database + context_options.setdefault('scope', SCOPE_VALUES) + self.context_options = context_options + + @property + def database(self): + db = self._database or self.model._meta.database + if db is None: + raise ImproperlyConfigured('database attribute does not appear to ' + 'be set on the model: %s' % self.model) + return db + + @database.setter + 
def database(self, value): + self._database = value + + def _create_context(self): + return self.database.get_sql_context(**self.context_options) + + def _create_table(self, safe=True, **options): + is_temp = options.pop('temporary', False) + ctx = self._create_context() + ctx.literal('CREATE TEMPORARY TABLE ' if is_temp else 'CREATE TABLE ') + if safe: + ctx.literal('IF NOT EXISTS ') + ctx.sql(self.model).literal(' ') + + columns = [] + constraints = [] + meta = self.model._meta + if meta.composite_key: + pk_columns = [meta.fields[field_name].column + for field_name in meta.primary_key.field_names] + constraints.append(NodeList((SQL('PRIMARY KEY'), + EnclosedNodeList(pk_columns)))) + + for field in meta.sorted_fields: + columns.append(field.ddl(ctx)) + if isinstance(field, ForeignKeyField) and not field.deferred: + constraints.append(field.foreign_key_constraint()) + + if meta.constraints: + constraints.extend(meta.constraints) + + constraints.extend(self._create_table_option_sql(options)) + ctx.sql(EnclosedNodeList(columns + constraints)) + + if meta.table_settings is not None: + table_settings = ensure_tuple(meta.table_settings) + for setting in table_settings: + if not isinstance(setting, basestring): + raise ValueError('table_settings must be strings') + ctx.literal(' ').literal(setting) + + extra_opts = [] + if meta.strict_tables: extra_opts.append('STRICT') + if meta.without_rowid: extra_opts.append('WITHOUT ROWID') + if extra_opts: + ctx.literal(' %s' % ', '.join(extra_opts)) + return ctx + + def _create_table_option_sql(self, options): + accum = [] + options = merge_dict(self.model._meta.options or {}, options) + if not options: + return accum + + for key, value in sorted(options.items()): + if not isinstance(value, Node): + if is_model(value): + value = value._meta.table + else: + value = SQL(str(value)) + accum.append(NodeList((SQL(key), value), glue='=')) + return accum + + def create_table(self, safe=True, **options): + 
self.database.execute(self._create_table(safe=safe, **options)) + + def _create_table_as(self, table_name, query, safe=True, **meta): + ctx = (self._create_context() + .literal('CREATE TEMPORARY TABLE ' + if meta.get('temporary') else 'CREATE TABLE ')) + if safe: + ctx.literal('IF NOT EXISTS ') + return (ctx + .sql(Entity(*ensure_tuple(table_name))) + .literal(' AS ') + .sql(query)) + + def create_table_as(self, table_name, query, safe=True, **meta): + ctx = self._create_table_as(table_name, query, safe=safe, **meta) + self.database.execute(ctx) + + def _drop_table(self, safe=True, **options): + ctx = (self._create_context() + .literal('DROP TABLE IF EXISTS ' if safe else 'DROP TABLE ') + .sql(self.model)) + if options.get('cascade'): + ctx = ctx.literal(' CASCADE') + elif options.get('restrict'): + ctx = ctx.literal(' RESTRICT') + return ctx + + def drop_table(self, safe=True, **options): + self.database.execute(self._drop_table(safe=safe, **options)) + + def _truncate_table(self, restart_identity=False, cascade=False): + db = self.database + if not db.truncate_table: + return (self._create_context() + .literal('DELETE FROM ').sql(self.model)) + + ctx = self._create_context().literal('TRUNCATE TABLE ').sql(self.model) + if restart_identity: + ctx = ctx.literal(' RESTART IDENTITY') + if cascade: + ctx = ctx.literal(' CASCADE') + return ctx + + def truncate_table(self, restart_identity=False, cascade=False): + self.database.execute(self._truncate_table(restart_identity, cascade)) + + def _create_indexes(self, safe=True): + return [self._create_index(index, safe) + for index in self.model._meta.fields_to_index()] + + def _create_index(self, index, safe=True): + if isinstance(index, Index): + if not self.database.safe_create_index: + index = index.safe(False) + elif index._safe != safe: + index = index.safe(safe) + return self._create_context().sql(index) + + def create_indexes(self, safe=True): + for query in self._create_indexes(safe=safe): + 
self.database.execute(query) + + def _drop_indexes(self, safe=True): + return [self._drop_index(index, safe) + for index in self.model._meta.fields_to_index() + if isinstance(index, Index)] + + def _drop_index(self, index, safe): + statement = 'DROP INDEX ' + if safe and self.database.safe_drop_index: + statement += 'IF EXISTS ' + if isinstance(index._table, Table) and index._table._schema: + index_name = Entity(index._table._schema, index._name) + else: + index_name = Entity(index._name) + return (self + ._create_context() + .literal(statement) + .sql(index_name)) + + def drop_indexes(self, safe=True): + for query in self._drop_indexes(safe=safe): + self.database.execute(query) + + def _check_sequences(self, field): + if not field.sequence or not self.database.sequences: + raise ValueError('Sequences are either not supported, or are not ' + 'defined for "%s".' % field.name) + + def _sequence_for_field(self, field): + if field.model._meta.schema: + return Entity(field.model._meta.schema, field.sequence) + else: + return Entity(field.sequence) + + def _create_sequence(self, field): + self._check_sequences(field) + if not self.database.sequence_exists(field.sequence): + return (self + ._create_context() + .literal('CREATE SEQUENCE ') + .sql(self._sequence_for_field(field))) + + def create_sequence(self, field): + seq_ctx = self._create_sequence(field) + if seq_ctx is not None: + self.database.execute(seq_ctx) + + def _drop_sequence(self, field): + self._check_sequences(field) + if self.database.sequence_exists(field.sequence): + return (self + ._create_context() + .literal('DROP SEQUENCE ') + .sql(self._sequence_for_field(field))) + + def drop_sequence(self, field): + seq_ctx = self._drop_sequence(field) + if seq_ctx is not None: + self.database.execute(seq_ctx) + + def _create_foreign_key(self, field): + name = 'fk_%s_%s_refs_%s' % (field.model._meta.table_name, + field.column_name, + field.rel_model._meta.table_name) + return (self + ._create_context() + 
.literal('ALTER TABLE ') + .sql(field.model) + .literal(' ADD CONSTRAINT ') + .sql(Entity(_truncate_constraint_name(name))) + .literal(' ') + .sql(field.foreign_key_constraint())) + + def create_foreign_key(self, field): + self.database.execute(self._create_foreign_key(field)) + + def create_sequences(self): + if self.database.sequences: + for field in self.model._meta.sorted_fields: + if field.sequence: + self.create_sequence(field) + + def create_all(self, safe=True, **table_options): + self.create_sequences() + self.create_table(safe, **table_options) + self.create_indexes(safe=safe) + + def drop_sequences(self): + if self.database.sequences: + for field in self.model._meta.sorted_fields: + if field.sequence: + self.drop_sequence(field) + + def drop_all(self, safe=True, drop_sequences=True, **options): + self.drop_table(safe, **options) + if drop_sequences: + self.drop_sequences() + + +class Metadata(object): + def __init__(self, model, database=None, table_name=None, indexes=None, + primary_key=None, constraints=None, schema=None, + only_save_dirty=False, depends_on=None, options=None, + db_table=None, table_function=None, table_settings=None, + without_rowid=False, temporary=False, strict_tables=None, + legacy_table_names=True, **kwargs): + if db_table is not None: + __deprecated__('"db_table" has been deprecated in favor of ' + '"table_name" for Models.') + table_name = db_table + self.model = model + self.database = database + + self.fields = {} + self.columns = {} + self.combined = {} + + self._sorted_field_list = _SortedFieldList() + self.sorted_fields = [] + self.sorted_field_names = [] + + self.defaults = {} + self._default_by_name = {} + self._default_dict = {} + self._default_callables = {} + self._default_callable_list = [] + + self.name = model.__name__.lower() + self.table_function = table_function + self.legacy_table_names = legacy_table_names + if not table_name: + table_name = (self.table_function(model) + if self.table_function + else 
self.make_table_name()) + self.table_name = table_name + self._table = None + + self.indexes = list(indexes) if indexes else [] + self.constraints = constraints + self._schema = schema + self.primary_key = primary_key + self.composite_key = self.auto_increment = None + self.only_save_dirty = only_save_dirty + self.depends_on = depends_on + self.table_settings = table_settings + self.without_rowid = without_rowid + self.strict_tables = strict_tables + self.temporary = temporary + + self.refs = {} + self.backrefs = {} + self.model_refs = collections.defaultdict(list) + self.model_backrefs = collections.defaultdict(list) + self.manytomany = {} + + self.options = options or {} + for key, value in kwargs.items(): + setattr(self, key, value) + self._additional_keys = set(kwargs.keys()) + + # Allow objects to register hooks that are called if the model is bound + # to a different database. For example, BlobField uses a different + # Python data-type depending on the db driver / python version. When + # the database changes, we need to update any BlobField so they can use + # the appropriate data-type. 
+ self._db_hooks = [] + + def make_table_name(self): + if self.legacy_table_names: + return re.sub(r'[^\w]+', '_', self.name) + return make_snake_case(self.model.__name__) + + def model_graph(self, refs=True, backrefs=True, depth_first=True): + if not refs and not backrefs: + raise ValueError('One of `refs` or `backrefs` must be True.') + + accum = [(None, self.model, None)] + seen = set() + queue = collections.deque((self,)) + method = queue.pop if depth_first else queue.popleft + + while queue: + curr = method() + if curr in seen: continue + seen.add(curr) + + if refs: + for fk, model in curr.refs.items(): + accum.append((fk, model, False)) + queue.append(model._meta) + if backrefs: + for fk, model in curr.backrefs.items(): + accum.append((fk, model, True)) + queue.append(model._meta) + + return accum + + def add_ref(self, field): + rel = field.rel_model + self.refs[field] = rel + self.model_refs[rel].append(field) + rel._meta.backrefs[field] = self.model + rel._meta.model_backrefs[self.model].append(field) + + def remove_ref(self, field): + rel = field.rel_model + del self.refs[field] + self.model_refs[rel].remove(field) + del rel._meta.backrefs[field] + rel._meta.model_backrefs[self.model].remove(field) + + def add_manytomany(self, field): + self.manytomany[field.name] = field + + def remove_manytomany(self, field): + del self.manytomany[field.name] + + @property + def table(self): + if self._table is None: + self._table = Table( + self.table_name, + [field.column_name for field in self.sorted_fields], + schema=self.schema, + _model=self.model, + _database=self.database) + return self._table + + @table.setter + def table(self, value): + raise AttributeError('Cannot set the "table".') + + @table.deleter + def table(self): + self._table = None + + @property + def schema(self): + return self._schema + + @schema.setter + def schema(self, value): + self._schema = value + del self.table + + @property + def entity(self): + if self._schema: + return 
Entity(self._schema, self.table_name) + else: + return Entity(self.table_name) + + def _update_sorted_fields(self): + self.sorted_fields = list(self._sorted_field_list) + self.sorted_field_names = [f.name for f in self.sorted_fields] + + def get_rel_for_model(self, model): + if isinstance(model, ModelAlias): + model = model.model + forwardrefs = self.model_refs.get(model, []) + backrefs = self.model_backrefs.get(model, []) + return (forwardrefs, backrefs) + + def add_field(self, field_name, field, set_attribute=True): + if field_name in self.fields: + self.remove_field(field_name) + elif field_name in self.manytomany: + self.remove_manytomany(self.manytomany[field_name]) + + if not isinstance(field, MetaField): + del self.table + field.bind(self.model, field_name, set_attribute) + self.fields[field.name] = field + self.columns[field.column_name] = field + self.combined[field.name] = field + self.combined[field.column_name] = field + + self._sorted_field_list.insert(field) + self._update_sorted_fields() + + if field.default is not None: + # This optimization helps speed up model instance construction. 
+ self.defaults[field] = field.default + if callable_(field.default): + self._default_callables[field] = field.default + self._default_callable_list.append((field.name, + field.default)) + else: + self._default_dict[field] = field.default + self._default_by_name[field.name] = field.default + else: + field.bind(self.model, field_name, set_attribute) + + if isinstance(field, ForeignKeyField): + self.add_ref(field) + elif isinstance(field, ManyToManyField) and field.name: + self.add_manytomany(field) + + def remove_field(self, field_name): + if field_name not in self.fields: + return + + del self.table + original = self.fields.pop(field_name) + del self.columns[original.column_name] + del self.combined[field_name] + try: + del self.combined[original.column_name] + except KeyError: + pass + self._sorted_field_list.remove(original) + self._update_sorted_fields() + + if original.default is not None: + del self.defaults[original] + if self._default_callables.pop(original, None): + for i, (name, _) in enumerate(self._default_callable_list): + if name == field_name: + self._default_callable_list.pop(i) + break + else: + self._default_dict.pop(original, None) + self._default_by_name.pop(original.name, None) + + if isinstance(original, ForeignKeyField): + self.remove_ref(original) + + def set_primary_key(self, name, field): + self.composite_key = isinstance(field, CompositeKey) + self.add_field(name, field) + self.primary_key = field + self.auto_increment = ( + field.auto_increment or + bool(field.sequence)) + + def get_primary_keys(self): + if self.composite_key: + return tuple([self.fields[field_name] + for field_name in self.primary_key.field_names]) + else: + return (self.primary_key,) if self.primary_key is not False else () + + def get_default_dict(self): + dd = self._default_by_name.copy() + for field_name, default in self._default_callable_list: + dd[field_name] = default() + return dd + + def fields_to_index(self): + indexes = [] + for f in self.sorted_fields: + if 
f.primary_key: + continue + if f.index or f.unique: + indexes.append(ModelIndex(self.model, (f,), unique=f.unique, + using=f.index_type)) + + for index_obj in self.indexes: + if isinstance(index_obj, Node): + indexes.append(index_obj) + elif isinstance(index_obj, (list, tuple)): + index_parts, unique = index_obj + fields = [] + for part in index_parts: + if isinstance(part, basestring): + fields.append(self.combined[part]) + elif isinstance(part, Node): + fields.append(part) + else: + raise ValueError('Expected either a field name or a ' + 'subclass of Node. Got: %s' % part) + indexes.append(ModelIndex(self.model, fields, unique=unique)) + + return indexes + + def set_database(self, database): + self.database = database + self.model._schema._database = database + del self.table + + # Apply any hooks that have been registered. + for hook in self._db_hooks: + hook(database) + + def set_table_name(self, table_name): + self.table_name = table_name + del self.table + + +class SubclassAwareMetadata(Metadata): + models = [] + + def __init__(self, model, *args, **kwargs): + super(SubclassAwareMetadata, self).__init__(model, *args, **kwargs) + self.models.append(model) + + def map_models(self, fn): + for model in self.models: + fn(model) + + +class DoesNotExist(Exception): pass + + +class ModelBase(type): + inheritable = set(['constraints', 'database', 'indexes', 'primary_key', + 'options', 'schema', 'table_function', 'temporary', + 'only_save_dirty', 'legacy_table_names', + 'table_settings', 'strict_tables']) + + def __new__(cls, name, bases, attrs): + if name == MODEL_BASE or bases[0].__name__ == MODEL_BASE: + return super(ModelBase, cls).__new__(cls, name, bases, attrs) + + meta_options = {} + meta = attrs.pop('Meta', None) + if meta: + for k, v in meta.__dict__.items(): + if not k.startswith('_'): + meta_options[k] = v + + pk = getattr(meta, 'primary_key', None) + pk_name = parent_pk = None + + # Inherit any field descriptors by deep copying the underlying field + # 
into the attrs of the new model, additionally see if the bases define + # inheritable model options and swipe them. + for b in bases: + if not hasattr(b, '_meta'): + continue + + base_meta = b._meta + if parent_pk is None: + parent_pk = deepcopy(base_meta.primary_key) + all_inheritable = cls.inheritable | base_meta._additional_keys + for k in base_meta.__dict__: + if k in all_inheritable and k not in meta_options: + meta_options[k] = base_meta.__dict__[k] + meta_options.setdefault('database', base_meta.database) + meta_options.setdefault('schema', base_meta.schema) + + for (k, v) in b.__dict__.items(): + if k in attrs: continue + + if isinstance(v, FieldAccessor) and not v.field.primary_key: + attrs[k] = deepcopy(v.field) + + sopts = meta_options.pop('schema_options', None) or {} + Meta = meta_options.get('model_metadata_class', Metadata) + Schema = meta_options.get('schema_manager_class', SchemaManager) + + # Construct the new class. + cls = super(ModelBase, cls).__new__(cls, name, bases, attrs) + cls.__data__ = cls.__rel__ = None + + cls._meta = Meta(cls, **meta_options) + cls._schema = Schema(cls, **sopts) + + fields = [] + for key, value in cls.__dict__.items(): + if isinstance(value, Field): + if value.primary_key and pk: + raise ValueError('over-determined primary key %s.' % name) + elif value.primary_key: + pk, pk_name = value, key + else: + fields.append((key, value)) + + if pk is None: + if parent_pk is not False: + pk, pk_name = ((parent_pk, parent_pk.name) + if parent_pk is not None else + (AutoField(), 'id')) + else: + pk = False + elif isinstance(pk, CompositeKey): + pk_name = '__composite_key__' + cls._meta.composite_key = True + + if pk is not False: + cls._meta.set_primary_key(pk_name, pk) + + for name, field in fields: + cls._meta.add_field(name, field) + + # Create a repr and error class before finalizing. 
+ if hasattr(cls, '__str__') and '__repr__' not in attrs: + setattr(cls, '__repr__', lambda self: '<%s: %s>' % ( + cls.__name__, self.__str__())) + + exc_name = '%sDoesNotExist' % cls.__name__ + exc_attrs = {'__module__': cls.__module__} + exception_class = type(exc_name, (DoesNotExist,), exc_attrs) + cls.DoesNotExist = exception_class + + # Call validation hook, allowing additional model validation. + cls.validate_model() + DeferredForeignKey.resolve(cls) + return cls + + def __repr__(self): + return '' % self.__name__ + + def __iter__(self): + return iter(self.select()) + + def __getitem__(self, key): + return self.get_by_id(key) + + def __setitem__(self, key, value): + self.set_by_id(key, value) + + def __delitem__(self, key): + self.delete_by_id(key) + + def __contains__(self, key): + try: + self.get_by_id(key) + except self.DoesNotExist: + return False + else: + return True + + def __len__(self): + return self.select().count() + def __bool__(self): return True + __nonzero__ = __bool__ # Python 2. 
+ + def __sql__(self, ctx): + return ctx.sql(self._meta.table) + + +class _BoundModelsContext(_callable_context_manager): + def __init__(self, models, database, bind_refs, bind_backrefs): + self.models = models + self.database = database + self.bind_refs = bind_refs + self.bind_backrefs = bind_backrefs + + def __enter__(self): + self._orig_database = [] + for model in self.models: + self._orig_database.append(model._meta.database) + model.bind(self.database, self.bind_refs, self.bind_backrefs, + _exclude=set(self.models)) + return self.models + + def __exit__(self, exc_type, exc_val, exc_tb): + for model, db in zip(self.models, self._orig_database): + model.bind(db, self.bind_refs, self.bind_backrefs, + _exclude=set(self.models)) + + +class Model(with_metaclass(ModelBase, Node)): + def __init__(self, *args, **kwargs): + if kwargs.pop('__no_default__', None): + self.__data__ = {} + else: + self.__data__ = self._meta.get_default_dict() + self._dirty = set(self.__data__) + self.__rel__ = {} + + for k in kwargs: + setattr(self, k, kwargs[k]) + + def __str__(self): + return str(self._pk) if self._meta.primary_key is not False else 'n/a' + + @classmethod + def validate_model(cls): + pass + + @classmethod + def alias(cls, alias=None): + return ModelAlias(cls, alias) + + @classmethod + def select(cls, *fields): + is_default = not fields + if not fields: + fields = cls._meta.sorted_fields + return ModelSelect(cls, fields, is_default=is_default) + + @classmethod + def _normalize_data(cls, data, kwargs): + normalized = {} + if data: + if not isinstance(data, dict): + if kwargs: + raise ValueError('Data cannot be mixed with keyword ' + 'arguments: %s' % data) + return data + for key in data: + try: + field = (key if isinstance(key, Field) + else cls._meta.combined[key]) + except KeyError: + if not isinstance(key, Node): + raise ValueError('Unrecognized field name: "%s" in %s.' 
+ % (key, data)) + field = key + normalized[field] = data[key] + if kwargs: + for key in kwargs: + try: + normalized[cls._meta.combined[key]] = kwargs[key] + except KeyError: + normalized[getattr(cls, key)] = kwargs[key] + return normalized + + @classmethod + def update(cls, __data=None, **update): + return ModelUpdate(cls, cls._normalize_data(__data, update)) + + @classmethod + def insert(cls, __data=None, **insert): + return ModelInsert(cls, cls._normalize_data(__data, insert)) + + @classmethod + def insert_many(cls, rows, fields=None): + return ModelInsert(cls, insert=rows, columns=fields) + + @classmethod + def insert_from(cls, query, fields): + columns = [getattr(cls, field) if isinstance(field, basestring) + else field for field in fields] + return ModelInsert(cls, insert=query, columns=columns) + + @classmethod + def replace(cls, __data=None, **insert): + return cls.insert(__data, **insert).on_conflict('REPLACE') + + @classmethod + def replace_many(cls, rows, fields=None): + return (cls + .insert_many(rows=rows, fields=fields) + .on_conflict('REPLACE')) + + @classmethod + def raw(cls, sql, *params): + return ModelRaw(cls, sql, params) + + @classmethod + def delete(cls): + return ModelDelete(cls) + + @classmethod + def create(cls, **query): + inst = cls(**query) + inst.save(force_insert=True) + return inst + + @classmethod + def bulk_create(cls, model_list, batch_size=None): + if batch_size is not None: + batches = chunked(model_list, batch_size) + else: + batches = [model_list] + + field_names = list(cls._meta.sorted_field_names) + if cls._meta.auto_increment: + pk_name = cls._meta.primary_key.name + field_names.remove(pk_name) + + if cls._meta.database.returning_clause and \ + cls._meta.primary_key is not False: + pk_fields = cls._meta.get_primary_keys() + else: + pk_fields = None + + fields = [cls._meta.fields[field_name] for field_name in field_names] + attrs = [] + for field in fields: + if isinstance(field, ForeignKeyField): + 
attrs.append(field.object_id_name) + else: + attrs.append(field.name) + + for batch in batches: + accum = ([getattr(model, f) for f in attrs] + for model in batch) + res = cls.insert_many(accum, fields=fields).execute() + if pk_fields and res is not None: + for row, model in zip(res, batch): + for (pk_field, obj_id) in zip(pk_fields, row): + setattr(model, pk_field.name, obj_id) + + @classmethod + def bulk_update(cls, model_list, fields, batch_size=None): + if isinstance(cls._meta.primary_key, CompositeKey): + raise ValueError('bulk_update() is not supported for models with ' + 'a composite primary key.') + + # First normalize list of fields so all are field instances. + fields = [cls._meta.fields[f] if isinstance(f, basestring) else f + for f in fields] + # Now collect list of attribute names to use for values. + attrs = [field.object_id_name if isinstance(field, ForeignKeyField) + else field.name for field in fields] + + if batch_size is not None: + batches = chunked(model_list, batch_size) + else: + batches = [model_list] + + n = 0 + pk = cls._meta.primary_key + + for batch in batches: + id_list = [model._pk for model in batch] + update = {} + for field, attr in zip(fields, attrs): + accum = [] + for model in batch: + value = getattr(model, attr) + if not isinstance(value, Node): + value = field.to_value(value) + accum.append((pk.to_value(model._pk), value)) + case = Case(pk, accum) + update[field] = case + + n += (cls.update(update) + .where(cls._meta.primary_key.in_(id_list)) + .execute()) + return n + + @classmethod + def noop(cls): + return NoopModelSelect(cls, ()) + + @classmethod + def get(cls, *query, **filters): + sq = cls.select() + if query: + # Handle simple lookup using just the primary key. 
+ if len(query) == 1 and isinstance(query[0], int): + sq = sq.where(cls._meta.primary_key == query[0]) + else: + sq = sq.where(*query) + if filters: + sq = sq.filter(**filters) + return sq.get() + + @classmethod + def get_or_none(cls, *query, **filters): + try: + return cls.get(*query, **filters) + except DoesNotExist: + pass + + @classmethod + def get_by_id(cls, pk): + return cls.get(cls._meta.primary_key == pk) + + @classmethod + def set_by_id(cls, key, value): + if key is None: + return cls.insert(value).execute() + else: + return (cls.update(value) + .where(cls._meta.primary_key == key).execute()) + + @classmethod + def delete_by_id(cls, pk): + return cls.delete().where(cls._meta.primary_key == pk).execute() + + @classmethod + def get_or_create(cls, **kwargs): + defaults = kwargs.pop('defaults', {}) + query = cls.select() + for field, value in kwargs.items(): + query = query.where(getattr(cls, field) == value) + + try: + return query.get(), False + except cls.DoesNotExist: + try: + if defaults: + kwargs.update(defaults) + with cls._meta.database.atomic(): + return cls.create(**kwargs), True + except IntegrityError as exc: + try: + return query.get(), False + except cls.DoesNotExist: + raise exc + + @classmethod + def filter(cls, *dq_nodes, **filters): + return cls.select().filter(*dq_nodes, **filters) + + def get_id(self): + # Using getattr(self, pk-name) could accidentally trigger a query if + # the primary-key is a foreign-key. So we use the safe_name attribute, + # which defaults to the field-name, but will be the object_id_name for + # foreign-key fields. 
+ if self._meta.primary_key is not False: + return getattr(self, self._meta.primary_key.safe_name) + + _pk = property(get_id) + + @_pk.setter + def _pk(self, value): + setattr(self, self._meta.primary_key.name, value) + + def _pk_expr(self): + return self._meta.primary_key == self._pk + + def _prune_fields(self, field_dict, only): + new_data = {} + for field in only: + if isinstance(field, basestring): + field = self._meta.combined[field] + if field.name in field_dict: + new_data[field.name] = field_dict[field.name] + return new_data + + def _populate_unsaved_relations(self, field_dict): + for foreign_key_field in self._meta.refs: + foreign_key = foreign_key_field.name + conditions = ( + foreign_key in field_dict and + field_dict[foreign_key] is None and + self.__rel__.get(foreign_key) is not None) + if conditions: + setattr(self, foreign_key, getattr(self, foreign_key)) + field_dict[foreign_key] = self.__data__[foreign_key] + + def save(self, force_insert=False, only=None): + field_dict = self.__data__.copy() + if self._meta.primary_key is not False: + pk_field = self._meta.primary_key + pk_value = self._pk + else: + pk_field = pk_value = None + if only is not None: + field_dict = self._prune_fields(field_dict, only) + elif self._meta.only_save_dirty and not force_insert: + field_dict = self._prune_fields(field_dict, self.dirty_fields) + if not field_dict: + self._dirty.clear() + return False + + self._populate_unsaved_relations(field_dict) + rows = 1 + + if self._meta.auto_increment and pk_value is None: + field_dict.pop(pk_field.name, None) + + if pk_value is not None and not force_insert: + if self._meta.composite_key: + for pk_part_name in pk_field.field_names: + field_dict.pop(pk_part_name, None) + else: + field_dict.pop(pk_field.name, None) + if not field_dict: + raise ValueError('no data to save!') + rows = self.update(**field_dict).where(self._pk_expr()).execute() + elif pk_field is not None: + pk = self.insert(**field_dict).execute() + if pk is not None 
and (self._meta.auto_increment or + pk_value is None): + self._pk = pk + # Although we set the primary-key, do not mark it as dirty. + self._dirty.discard(pk_field.name) + else: + self.insert(**field_dict).execute() + + self._dirty -= set(field_dict) # Remove any fields we saved. + return rows + + def is_dirty(self): + return bool(self._dirty) + + @property + def dirty_fields(self): + return [f for f in self._meta.sorted_fields if f.name in self._dirty] + + def dependencies(self, search_nullable=False): + model_class = type(self) + stack = [(type(self), None)] + seen = set() + + while stack: + klass, query = stack.pop() + if klass in seen: + continue + seen.add(klass) + for fk, rel_model in klass._meta.backrefs.items(): + if rel_model is model_class or query is None: + node = (fk == self.__data__[fk.rel_field.name]) + else: + node = fk << query + subquery = (rel_model.select(rel_model._meta.primary_key) + .where(node)) + if not fk.null or search_nullable: + stack.append((rel_model, subquery)) + yield (node, fk) + + def delete_instance(self, recursive=False, delete_nullable=False): + if recursive: + dependencies = self.dependencies(delete_nullable) + for query, fk in reversed(list(dependencies)): + model = fk.model + if fk.null and not delete_nullable: + model.update(**{fk.name: None}).where(query).execute() + else: + model.delete().where(query).execute() + return type(self).delete().where(self._pk_expr()).execute() + + def __hash__(self): + return hash((self.__class__, self._pk)) + + def __eq__(self, other): + return ( + other.__class__ == self.__class__ and + self._pk is not None and + self._pk == other._pk) + + def __ne__(self, other): + return not self == other + + def __sql__(self, ctx): + # NOTE: when comparing a foreign-key field whose related-field is not a + # primary-key, then doing an equality test for the foreign-key with a + # model instance will return the wrong value; since we would return + # the primary key for a given model instance. 
+ # + # This checks to see if we have a converter in the scope, and that we + # are converting a foreign-key expression. If so, we hand the model + # instance to the converter rather than blindly grabbing the primary- + # key. In the event the provided converter fails to handle the model + # instance, then we will return the primary-key. + if ctx.state.converter is not None and ctx.state.is_fk_expr: + try: + return ctx.sql(Value(self, converter=ctx.state.converter)) + except (TypeError, ValueError): + pass + + return ctx.sql(Value(getattr(self, self._meta.primary_key.name), + converter=self._meta.primary_key.db_value)) + + @classmethod + def bind(cls, database, bind_refs=True, bind_backrefs=True, _exclude=None): + is_different = cls._meta.database is not database + cls._meta.set_database(database) + if bind_refs or bind_backrefs: + if _exclude is None: + _exclude = set() + G = cls._meta.model_graph(refs=bind_refs, backrefs=bind_backrefs) + for _, model, is_backref in G: + if model not in _exclude: + model._meta.set_database(database) + _exclude.add(model) + return is_different + + @classmethod + def bind_ctx(cls, database, bind_refs=True, bind_backrefs=True): + return _BoundModelsContext((cls,), database, bind_refs, bind_backrefs) + + @classmethod + def table_exists(cls): + M = cls._meta + return cls._schema.database.table_exists(M.table.__name__, M.schema) + + @classmethod + def create_table(cls, safe=True, **options): + if 'fail_silently' in options: + __deprecated__('"fail_silently" has been deprecated in favor of ' + '"safe" for the create_table() method.') + safe = options.pop('fail_silently') + + if safe and not cls._schema.database.safe_create_index \ + and cls.table_exists(): + return + if cls._meta.temporary: + options.setdefault('temporary', cls._meta.temporary) + cls._schema.create_all(safe, **options) + + @classmethod + def drop_table(cls, safe=True, drop_sequences=True, **options): + if safe and not cls._schema.database.safe_drop_index \ + and not 
cls.table_exists(): + return + if cls._meta.temporary: + options.setdefault('temporary', cls._meta.temporary) + cls._schema.drop_all(safe, drop_sequences, **options) + + @classmethod + def truncate_table(cls, **options): + cls._schema.truncate_table(**options) + + @classmethod + def index(cls, *fields, **kwargs): + return ModelIndex(cls, fields, **kwargs) + + @classmethod + def add_index(cls, *fields, **kwargs): + if len(fields) == 1 and isinstance(fields[0], (SQL, Index)): + cls._meta.indexes.append(fields[0]) + else: + cls._meta.indexes.append(ModelIndex(cls, fields, **kwargs)) + + +class ModelAlias(Node): + """Provide a separate reference to a model in a query.""" + def __init__(self, model, alias=None): + self.__dict__['model'] = model + self.__dict__['alias'] = alias + + def __getattr__(self, attr): + # Hack to work-around the fact that properties or other objects + # implementing the descriptor protocol (on the model being aliased), + # will not work correctly when we use getattr(). So we explicitly pass + # the model alias to the descriptor's getter. + try: + obj = self.model.__dict__[attr] + except KeyError: + pass + else: + if isinstance(obj, ModelDescriptor): + return obj.__get__(None, self) + + model_attr = getattr(self.model, attr) + if isinstance(model_attr, Field): + self.__dict__[attr] = FieldAlias.create(self, model_attr) + return self.__dict__[attr] + return model_attr + + def __setattr__(self, attr, value): + raise AttributeError('Cannot set attributes on model aliases.') + + def get_field_aliases(self): + return [getattr(self, n) for n in self.model._meta.sorted_field_names] + + def select(self, *selection): + if not selection: + selection = self.get_field_aliases() + return ModelSelect(self, selection) + + def __call__(self, **kwargs): + return self.model(**kwargs) + + def __sql__(self, ctx): + if ctx.scope == SCOPE_VALUES: + # Return the quoted table name. 
+ return ctx.sql(self.model) + + if self.alias: + ctx.alias_manager[self] = self.alias + + if ctx.scope == SCOPE_SOURCE: + # Define the table and its alias. + return (ctx + .sql(self.model._meta.entity) + .literal(' AS ') + .sql(Entity(ctx.alias_manager[self]))) + else: + # Refer to the table using the alias. + return ctx.sql(Entity(ctx.alias_manager[self])) + + +class FieldAlias(Field): + def __init__(self, source, field): + self.source = source + self.model = source.model + self.field = field + + @classmethod + def create(cls, source, field): + class _FieldAlias(cls, type(field)): + pass + return _FieldAlias(source, field) + + def clone(self): + return FieldAlias(self.source, self.field) + + def adapt(self, value): return self.field.adapt(value) + def python_value(self, value): return self.field.python_value(value) + def db_value(self, value): return self.field.db_value(value) + def __getattr__(self, attr): + return self.source if attr == 'model' else getattr(self.field, attr) + + def __sql__(self, ctx): + return ctx.sql(Column(self.source, self.field.column_name)) + + +def sort_models(models): + models = set(models) + seen = set() + ordering = [] + def dfs(model): + if model in models and model not in seen: + seen.add(model) + for foreign_key, rel_model in model._meta.refs.items(): + # Do not depth-first search deferred foreign-keys as this can + # cause tables to be created in the incorrect order. 
+ if not foreign_key.deferred: + dfs(rel_model) + if model._meta.depends_on: + for dependency in model._meta.depends_on: + dfs(dependency) + ordering.append(model) + + names = lambda m: (m._meta.name, m._meta.table_name) + for m in sorted(models, key=names): + dfs(m) + return ordering + + +class _ModelQueryHelper(object): + default_row_type = ROW.MODEL + + def __init__(self, *args, **kwargs): + super(_ModelQueryHelper, self).__init__(*args, **kwargs) + if not self._database: + self._database = self.model._meta.database + + @Node.copy + def objects(self, constructor=None): + self._row_type = ROW.CONSTRUCTOR + self._constructor = self.model if constructor is None else constructor + + def _get_cursor_wrapper(self, cursor): + row_type = self._row_type or self.default_row_type + if row_type == ROW.MODEL: + return self._get_model_cursor_wrapper(cursor) + elif row_type == ROW.DICT: + return ModelDictCursorWrapper(cursor, self.model, self._returning) + elif row_type == ROW.TUPLE: + return ModelTupleCursorWrapper(cursor, self.model, self._returning) + elif row_type == ROW.NAMED_TUPLE: + return ModelNamedTupleCursorWrapper(cursor, self.model, + self._returning) + elif row_type == ROW.CONSTRUCTOR: + return ModelObjectCursorWrapper(cursor, self.model, + self._returning, self._constructor) + else: + raise ValueError('Unrecognized row type: "%s".' 
% row_type) + + def _get_model_cursor_wrapper(self, cursor): + return ModelObjectCursorWrapper(cursor, self.model, [], self.model) + + +class ModelRaw(_ModelQueryHelper, RawQuery): + def __init__(self, model, sql, params, **kwargs): + self.model = model + self._returning = () + super(ModelRaw, self).__init__(sql=sql, params=params, **kwargs) + + def get(self): + try: + return self.execute()[0] + except IndexError: + sql, params = self.sql() + raise self.model.DoesNotExist('%s instance matching query does ' + 'not exist:\nSQL: %s\nParams: %s' % + (self.model, sql, params)) + + +class BaseModelSelect(_ModelQueryHelper): + def union_all(self, rhs): + return ModelCompoundSelectQuery(self.model, self, 'UNION ALL', rhs) + __add__ = union_all + + def union(self, rhs): + return ModelCompoundSelectQuery(self.model, self, 'UNION', rhs) + __or__ = union + + def intersect(self, rhs): + return ModelCompoundSelectQuery(self.model, self, 'INTERSECT', rhs) + __and__ = intersect + + def except_(self, rhs): + return ModelCompoundSelectQuery(self.model, self, 'EXCEPT', rhs) + __sub__ = except_ + + def __iter__(self): + if not self._cursor_wrapper: + self.execute() + return iter(self._cursor_wrapper) + + def prefetch(self, *subqueries): + return prefetch(self, *subqueries) + + def get(self, database=None): + clone = self.paginate(1, 1) + clone._cursor_wrapper = None + try: + return clone.execute(database)[0] + except IndexError: + sql, params = clone.sql() + raise self.model.DoesNotExist('%s instance matching query does ' + 'not exist:\nSQL: %s\nParams: %s' % + (clone.model, sql, params)) + + def get_or_none(self, database=None): + try: + return self.get(database=database) + except self.model.DoesNotExist: + pass + + @Node.copy + def group_by(self, *columns): + grouping = [] + for column in columns: + if is_model(column): + grouping.extend(column._meta.sorted_fields) + elif isinstance(column, Table): + if not column._columns: + raise ValueError('Cannot pass a table to group_by() that 
' + 'does not have columns explicitly ' + 'declared.') + grouping.extend([getattr(column, col_name) + for col_name in column._columns]) + else: + grouping.append(column) + self._group_by = grouping + + +class ModelCompoundSelectQuery(BaseModelSelect, CompoundSelectQuery): + def __init__(self, model, *args, **kwargs): + self.model = model + super(ModelCompoundSelectQuery, self).__init__(*args, **kwargs) + + def _get_model_cursor_wrapper(self, cursor): + return self.lhs._get_model_cursor_wrapper(cursor) + + +def _normalize_model_select(fields_or_models): + fields = [] + for fm in fields_or_models: + if is_model(fm): + fields.extend(fm._meta.sorted_fields) + elif isinstance(fm, ModelAlias): + fields.extend(fm.get_field_aliases()) + elif isinstance(fm, Table) and fm._columns: + fields.extend([getattr(fm, col) for col in fm._columns]) + else: + fields.append(fm) + return fields + + +class ModelSelect(BaseModelSelect, Select): + def __init__(self, model, fields_or_models, is_default=False): + self.model = self._join_ctx = model + self._joins = {} + self._is_default = is_default + fields = _normalize_model_select(fields_or_models) + super(ModelSelect, self).__init__([model], fields) + + def clone(self): + clone = super(ModelSelect, self).clone() + if clone._joins: + clone._joins = dict(clone._joins) + return clone + + def select(self, *fields_or_models): + if fields_or_models or not self._is_default: + self._is_default = False + fields = _normalize_model_select(fields_or_models) + return super(ModelSelect, self).select(*fields) + return self + + def switch(self, ctx=None): + self._join_ctx = self.model if ctx is None else ctx + return self + + def _get_model(self, src): + if is_model(src): + return src, True + elif isinstance(src, Table) and src._model: + return src._model, False + elif isinstance(src, ModelAlias): + return src.model, False + elif isinstance(src, ModelSelect): + return src.model, False + return None, False + + def _normalize_join(self, src, dest, on, 
attr): + # Allow "on" expression to have an alias that determines the + # destination attribute for the joined data. + on_alias = isinstance(on, Alias) + if on_alias: + attr = attr or on._alias + on = on.alias() + + # Obtain references to the source and destination models being joined. + src_model, src_is_model = self._get_model(src) + dest_model, dest_is_model = self._get_model(dest) + + if src_model and dest_model: + self._join_ctx = dest + constructor = dest_model + + # In the case where the "on" clause is a Column or Field, we will + # convert that field into the appropriate predicate expression. + if not (src_is_model and dest_is_model) and isinstance(on, Column): + if on.source is src: + to_field = src_model._meta.columns[on.name] + elif on.source is dest: + to_field = dest_model._meta.columns[on.name] + else: + raise AttributeError('"on" clause Column %s does not ' + 'belong to %s or %s.' % + (on, src_model, dest_model)) + on = None + elif isinstance(on, Field): + to_field = on + on = None + else: + to_field = None + + fk_field, is_backref = self._generate_on_clause( + src_model, dest_model, to_field, on) + + if on is None: + src_attr = 'name' if src_is_model else 'column_name' + dest_attr = 'name' if dest_is_model else 'column_name' + if is_backref: + lhs = getattr(dest, getattr(fk_field, dest_attr)) + rhs = getattr(src, getattr(fk_field.rel_field, src_attr)) + else: + lhs = getattr(src, getattr(fk_field, src_attr)) + rhs = getattr(dest, getattr(fk_field.rel_field, dest_attr)) + on = (lhs == rhs) + + if not attr: + if fk_field is not None and not is_backref: + attr = fk_field.name + else: + attr = dest_model._meta.name + elif on_alias and fk_field is not None and \ + attr == fk_field.object_id_name and not is_backref: + raise ValueError('Cannot assign join alias to "%s", as this ' + 'attribute is the object_id_name for the ' + 'foreign-key field "%s"' % (attr, fk_field)) + + elif isinstance(dest, Source): + constructor = dict + attr = attr or dest._alias + 
if not attr and isinstance(dest, Table): + attr = attr or dest.__name__ + + return (on, attr, constructor) + + def _generate_on_clause(self, src, dest, to_field=None, on=None): + meta = src._meta + is_backref = fk_fields = False + + # Get all the foreign keys between source and dest, and determine if + # the join is via a back-reference. + if dest in meta.model_refs: + fk_fields = meta.model_refs[dest] + elif dest in meta.model_backrefs: + fk_fields = meta.model_backrefs[dest] + is_backref = True + + if not fk_fields: + if on is not None: + return None, False + raise ValueError('Unable to find foreign key between %s and %s. ' + 'Please specify an explicit join condition.' % + (src, dest)) + elif to_field is not None: + # If the foreign-key field was specified explicitly, remove all + # other foreign-key fields from the list. + target = (to_field.field if isinstance(to_field, FieldAlias) + else to_field) + fk_fields = [f for f in fk_fields if ( + (f is target) or + (is_backref and f.rel_field is to_field))] + + if len(fk_fields) == 1: + return fk_fields[0], is_backref + + if on is None: + # If multiple foreign-keys exist, try using the FK whose name + # matches that of the related model. If not, raise an error as this + # is ambiguous. + for fk in fk_fields: + if fk.name == dest._meta.name: + return fk, is_backref + + raise ValueError('More than one foreign key between %s and %s.' + ' Please specify which you are joining on.' % + (src, dest)) + + # If there are multiple foreign-keys to choose from and the join + # predicate is an expression, we'll try to figure out which + # foreign-key field we're joining on so that we can assign to the + # correct attribute when resolving the model graph. + to_field = None + if isinstance(on, Expression): + lhs, rhs = on.lhs, on.rhs + # Coerce to set() so that we force Python to compare using the + # object's hash rather than equality test, which returns a + # false-positive due to overriding __eq__. 
+ fk_set = set(fk_fields) + + if isinstance(lhs, Field): + lhs_f = lhs.field if isinstance(lhs, FieldAlias) else lhs + if lhs_f in fk_set: + to_field = lhs_f + elif isinstance(rhs, Field): + rhs_f = rhs.field if isinstance(rhs, FieldAlias) else rhs + if rhs_f in fk_set: + to_field = rhs_f + + return to_field, False + + @Node.copy + def join(self, dest, join_type=JOIN.INNER, on=None, src=None, attr=None): + src = self._join_ctx if src is None else src + + if join_type == JOIN.LATERAL or join_type == JOIN.LEFT_LATERAL: + on = True + elif join_type != JOIN.CROSS: + on, attr, constructor = self._normalize_join(src, dest, on, attr) + if attr: + self._joins.setdefault(src, []) + self._joins[src].append((dest, attr, constructor, join_type)) + elif on is not None: + raise ValueError('Cannot specify on clause with cross join.') + + if not self._from_list: + raise ValueError('No sources to join on.') + + item = self._from_list.pop() + self._from_list.append(Join(item, dest, join_type, on)) + + def join_from(self, src, dest, join_type=JOIN.INNER, on=None, attr=None): + return self.join(dest, join_type, on, src, attr) + + def _get_model_cursor_wrapper(self, cursor): + if len(self._from_list) == 1 and not self._joins: + return ModelObjectCursorWrapper(cursor, self.model, + self._returning, self.model) + return ModelCursorWrapper(cursor, self.model, self._returning, + self._from_list, self._joins) + + def ensure_join(self, lm, rm, on=None, **join_kwargs): + join_ctx = self._join_ctx + for dest, _, constructor, _ in self._joins.get(lm, []): + if dest == rm: + return self + return self.switch(lm).join(rm, on=on, **join_kwargs).switch(join_ctx) + + def convert_dict_to_node(self, qdict): + accum = [] + joins = [] + fks = (ForeignKeyField, BackrefAccessor) + for key, value in sorted(qdict.items()): + curr = self.model + if '__' in key and key.rsplit('__', 1)[1] in DJANGO_MAP: + key, op = key.rsplit('__', 1) + op = DJANGO_MAP[op] + elif value is None: + op = DJANGO_MAP['is'] + else: + 
op = DJANGO_MAP['eq'] + + if '__' not in key: + # Handle simplest case. This avoids joining over-eagerly when a + # direct FK lookup is all that is required. + model_attr = getattr(curr, key) + else: + for piece in key.split('__'): + for dest, attr, _, _ in self._joins.get(curr, ()): + if attr == piece or (isinstance(dest, ModelAlias) and + dest.alias == piece): + curr = dest + break + else: + model_attr = getattr(curr, piece) + if value is not None and isinstance(model_attr, fks): + curr = model_attr.rel_model + joins.append(model_attr) + accum.append(op(model_attr, value)) + return accum, joins + + def filter(self, *args, **kwargs): + # normalize args and kwargs into a new expression + if args and kwargs: + dq_node = (reduce(operator.and_, [a.clone() for a in args]) & + DQ(**kwargs)) + elif args: + dq_node = (reduce(operator.and_, [a.clone() for a in args]) & + ColumnBase()) + elif kwargs: + dq_node = DQ(**kwargs) & ColumnBase() + else: + return self.clone() + + # dq_node should now be an Expression, lhs = Node(), rhs = ... + q = collections.deque([dq_node]) + dq_joins = [] + seen_joins = set() + while q: + curr = q.popleft() + if not isinstance(curr, Expression): + continue + for side, piece in (('lhs', curr.lhs), ('rhs', curr.rhs)): + if isinstance(piece, DQ): + query, joins = self.convert_dict_to_node(piece.query) + for join in joins: + if join not in seen_joins: + dq_joins.append(join) + seen_joins.add(join) + expression = reduce(operator.and_, query) + # Apply values from the DQ object. 
+ if piece._negated: + expression = Negated(expression) + #expression._alias = piece._alias + setattr(curr, side, expression) + else: + q.append(piece) + + if not args or not kwargs: + dq_node = dq_node.lhs + + query = self.clone() + for field in dq_joins: + if isinstance(field, ForeignKeyField): + lm, rm = field.model, field.rel_model + field_obj = field + elif isinstance(field, BackrefAccessor): + lm, rm = field.model, field.rel_model + field_obj = field.field + query = query.ensure_join(lm, rm, field_obj) + return query.where(dq_node) + + def create_table(self, name, safe=True, **meta): + return self.model._schema.create_table_as(name, self, safe, **meta) + + def __sql_selection__(self, ctx, is_subquery=False): + if self._is_default and is_subquery and len(self._returning) > 1 and \ + self.model._meta.primary_key is not False: + return ctx.sql(self.model._meta.primary_key) + + return ctx.sql(CommaNodeList(self._returning)) + + +class NoopModelSelect(ModelSelect): + def __sql__(self, ctx): + return self.model._meta.database.get_noop_select(ctx) + + def _get_cursor_wrapper(self, cursor): + return CursorWrapper(cursor) + + +class _ModelWriteQueryHelper(_ModelQueryHelper): + def __init__(self, model, *args, **kwargs): + self.model = model + super(_ModelWriteQueryHelper, self).__init__(model, *args, **kwargs) + + def returning(self, *returning): + accum = [] + for item in returning: + if is_model(item): + accum.extend(item._meta.sorted_fields) + else: + accum.append(item) + return super(_ModelWriteQueryHelper, self).returning(*accum) + + def _set_table_alias(self, ctx): + table = self.model._meta.table + ctx.alias_manager[table] = table.__name__ + + +class ModelUpdate(_ModelWriteQueryHelper, Update): + pass + + +class ModelInsert(_ModelWriteQueryHelper, Insert): + default_row_type = ROW.TUPLE + + def __init__(self, *args, **kwargs): + super(ModelInsert, self).__init__(*args, **kwargs) + if self._returning is None and self.model._meta.database is not None: + if 
self.model._meta.database.returning_clause: + self._returning = self.model._meta.get_primary_keys() + + def returning(self, *returning): + # By default ModelInsert will yield a `tuple` containing the + # primary-key of the newly inserted row. But if we are explicitly + # specifying a returning clause and have not set a row type, we will + # default to returning model instances instead. + if returning and self._row_type is None: + self._row_type = ROW.MODEL + return super(ModelInsert, self).returning(*returning) + + def get_default_data(self): + return self.model._meta.defaults + + def get_default_columns(self): + fields = self.model._meta.sorted_fields + return fields[1:] if self.model._meta.auto_increment else fields + + +class ModelDelete(_ModelWriteQueryHelper, Delete): + pass + + +class ManyToManyQuery(ModelSelect): + def __init__(self, instance, accessor, rel, *args, **kwargs): + self._instance = instance + self._accessor = accessor + self._src_attr = accessor.src_fk.rel_field.name + self._dest_attr = accessor.dest_fk.rel_field.name + super(ManyToManyQuery, self).__init__(rel, (rel,), *args, **kwargs) + + def _id_list(self, model_or_id_list): + if isinstance(model_or_id_list[0], Model): + return [getattr(obj, self._dest_attr) for obj in model_or_id_list] + return model_or_id_list + + def add(self, value, clear_existing=False): + if clear_existing: + self.clear() + + accessor = self._accessor + src_id = getattr(self._instance, self._src_attr) + if isinstance(value, SelectQuery): + query = value.columns( + Value(src_id), + accessor.dest_fk.rel_field) + accessor.through_model.insert_from( + fields=[accessor.src_fk, accessor.dest_fk], + query=query).execute() + else: + value = ensure_tuple(value) + if not value: return + + inserts = [{ + accessor.src_fk.name: src_id, + accessor.dest_fk.name: rel_id} + for rel_id in self._id_list(value)] + accessor.through_model.insert_many(inserts).execute() + + def remove(self, value): + src_id = getattr(self._instance, 
self._src_attr) + if isinstance(value, SelectQuery): + column = getattr(value.model, self._dest_attr) + subquery = value.columns(column) + return (self._accessor.through_model + .delete() + .where( + (self._accessor.dest_fk << subquery) & + (self._accessor.src_fk == src_id)) + .execute()) + else: + value = ensure_tuple(value) + if not value: + return + return (self._accessor.through_model + .delete() + .where( + (self._accessor.dest_fk << self._id_list(value)) & + (self._accessor.src_fk == src_id)) + .execute()) + + def clear(self): + src_id = getattr(self._instance, self._src_attr) + return (self._accessor.through_model + .delete() + .where(self._accessor.src_fk == src_id) + .execute()) + + +def safe_python_value(conv_func): + def validate(value): + try: + return conv_func(value) + except (TypeError, ValueError): + return value + return validate + + +class BaseModelCursorWrapper(DictCursorWrapper): + def __init__(self, cursor, model, columns): + super(BaseModelCursorWrapper, self).__init__(cursor) + self.model = model + self.select = columns or [] + + def _initialize_columns(self): + combined = self.model._meta.combined + table = self.model._meta.table + description = self.cursor.description + + self.ncols = len(self.cursor.description) + self.columns = [] + self.converters = converters = [None] * self.ncols + self.fields = fields = [None] * self.ncols + + for idx, description_item in enumerate(description): + column = orig_column = description_item[0] + + # Try to clean-up messy column descriptions when people do not + # provide an alias. The idea is that we take something like: + # SUM("t1"."price") -> "price") -> price + dot_index = column.rfind('.') + if dot_index != -1: + column = column[dot_index + 1:] + column = column.strip('()"`') + self.columns.append(column) + + # Now we'll see what they selected and see if we can improve the + # column-name being returned - e.g. by mapping it to the selected + # field's name. 
+ try: + raw_node = self.select[idx] + except IndexError: + if column in combined: + raw_node = node = combined[column] + else: + continue + else: + node = raw_node.unwrap() + + # If this column was given an alias, then we will use whatever + # alias was returned by the cursor. + is_alias = raw_node.is_alias() + if is_alias: + self.columns[idx] = orig_column + + # Heuristics used to attempt to get the field associated with a + # given SELECT column, so that we can accurately convert the value + # returned by the database-cursor into a Python object. + if isinstance(node, Field): + if raw_node._coerce: + converters[idx] = node.python_value + fields[idx] = node + if not is_alias: + self.columns[idx] = node.name + elif isinstance(node, ColumnBase) and raw_node._converter: + converters[idx] = raw_node._converter + elif isinstance(node, Function) and node._coerce: + if node._python_value is not None: + converters[idx] = node._python_value + elif node.arguments and isinstance(node.arguments[0], Node): + # If the first argument is a field or references a column + # on a Model, try using that field's conversion function. + # This usually works, but we use "safe_python_value()" so + # that if a TypeError or ValueError occurs during + # conversion we can just fall-back to the raw cursor value. + first = node.arguments[0].unwrap() + if isinstance(first, Entity): + path = first._path[-1] # Try to look-up by name. 
+ first = combined.get(path) + if isinstance(first, Field): + converters[idx] = safe_python_value(first.python_value) + elif column in combined: + if node._coerce: + converters[idx] = combined[column].python_value + if isinstance(node, Column) and node.source == table: + fields[idx] = combined[column] + + initialize = _initialize_columns + + def process_row(self, row): + raise NotImplementedError + + +class ModelDictCursorWrapper(BaseModelCursorWrapper): + def process_row(self, row): + result = {} + columns, converters = self.columns, self.converters + fields = self.fields + + for i in range(self.ncols): + attr = columns[i] + if attr in result: continue # Don't overwrite if we have dupes. + if converters[i] is not None: + result[attr] = converters[i](row[i]) + else: + result[attr] = row[i] + + return result + + +class ModelTupleCursorWrapper(ModelDictCursorWrapper): + constructor = tuple + + def process_row(self, row): + columns, converters = self.columns, self.converters + return self.constructor([ + (converters[i](row[i]) if converters[i] is not None else row[i]) + for i in range(self.ncols)]) + + +class ModelNamedTupleCursorWrapper(ModelTupleCursorWrapper): + def initialize(self): + self._initialize_columns() + attributes = [] + for i in range(self.ncols): + attributes.append(self.columns[i]) + self.tuple_class = collections.namedtuple('Row', attributes) + self.constructor = lambda row: self.tuple_class(*row) + + +class ModelObjectCursorWrapper(ModelDictCursorWrapper): + def __init__(self, cursor, model, select, constructor): + self.constructor = constructor + self.is_model = is_model(constructor) + super(ModelObjectCursorWrapper, self).__init__(cursor, model, select) + + def process_row(self, row): + data = super(ModelObjectCursorWrapper, self).process_row(row) + if self.is_model: + # Clear out any dirty fields before returning to the user. 
+ obj = self.constructor(__no_default__=1, **data) + obj._dirty.clear() + return obj + else: + return self.constructor(**data) + + +class ModelCursorWrapper(BaseModelCursorWrapper): + def __init__(self, cursor, model, select, from_list, joins): + super(ModelCursorWrapper, self).__init__(cursor, model, select) + self.from_list = from_list + self.joins = joins + + def initialize(self): + self._initialize_columns() + selected_src = set([field.model for field in self.fields + if field is not None]) + select, columns = self.select, self.columns + + self.key_to_constructor = {self.model: self.model} + self.src_is_dest = {} + self.src_to_dest = [] + accum = collections.deque(self.from_list) + dests = set() + + while accum: + curr = accum.popleft() + if isinstance(curr, Join): + accum.append(curr.lhs) + accum.append(curr.rhs) + continue + + if curr not in self.joins: + continue + + is_dict = isinstance(curr, dict) + for key, attr, constructor, join_type in self.joins[curr]: + if key not in self.key_to_constructor: + self.key_to_constructor[key] = constructor + + # (src, attr, dest, is_dict, join_type). + self.src_to_dest.append((curr, attr, key, is_dict, + join_type)) + dests.add(key) + accum.append(key) + + # Ensure that we accommodate everything selected. + for src in selected_src: + if src not in self.key_to_constructor: + if is_model(src): + self.key_to_constructor[src] = src + elif isinstance(src, ModelAlias): + self.key_to_constructor[src] = src.model + + # Indicate which sources are also dests. 
+ for src, _, dest, _, _ in self.src_to_dest: + self.src_is_dest[src] = src in dests and (dest in selected_src + or src in selected_src) + + self.column_keys = [] + for idx, node in enumerate(select): + key = self.model + field = self.fields[idx] + if field is not None: + if isinstance(field, FieldAlias): + key = field.source + else: + key = field.model + else: + if isinstance(node, Node): + node = node.unwrap() + if isinstance(node, Column): + key = node.source + + self.column_keys.append(key) + + def process_row(self, row): + objects = {} + object_list = [] + for key, constructor in self.key_to_constructor.items(): + objects[key] = constructor(__no_default__=True) + object_list.append(objects[key]) + + default_instance = objects[self.model] + + set_keys = set() + for idx, key in enumerate(self.column_keys): + # Get the instance corresponding to the selected column/value, + # falling back to the "root" model instance. + instance = objects.get(key, default_instance) + column = self.columns[idx] + value = row[idx] + if value is not None: + set_keys.add(key) + if self.converters[idx]: + value = self.converters[idx](value) + + if isinstance(instance, dict): + instance[column] = value + else: + setattr(instance, column, value) + + # Need to do some analysis on the joins before this. + for (src, attr, dest, is_dict, join_type) in self.src_to_dest: + instance = objects[src] + try: + joined_instance = objects[dest] + except KeyError: + continue + + # If no fields were set on the destination instance then do not + # assign an "empty" instance. + if instance is None or dest is None or \ + (dest not in set_keys and not self.src_is_dest.get(dest)): + continue + + # If no fields were set on either the source or the destination, + # then we have nothing to do here. 
+ if instance not in set_keys and dest not in set_keys \ + and join_type.endswith('OUTER JOIN'): + continue + + if is_dict: + instance[attr] = joined_instance + else: + setattr(instance, attr, joined_instance) + + # When instantiating models from a cursor, we clear the dirty fields. + for instance in object_list: + if isinstance(instance, Model): + instance._dirty.clear() + + return objects[self.model] + + +class PrefetchQuery(collections.namedtuple('_PrefetchQuery', ( + 'query', 'fields', 'is_backref', 'rel_models', 'field_to_name', 'model'))): + def __new__(cls, query, fields=None, is_backref=None, rel_models=None, + field_to_name=None, model=None): + if fields: + if is_backref: + if rel_models is None: + rel_models = [field.model for field in fields] + foreign_key_attrs = [field.rel_field.name for field in fields] + else: + if rel_models is None: + rel_models = [field.rel_model for field in fields] + foreign_key_attrs = [field.name for field in fields] + field_to_name = list(zip(fields, foreign_key_attrs)) + model = query.model + return super(PrefetchQuery, cls).__new__( + cls, query, fields, is_backref, rel_models, field_to_name, model) + + def populate_instance(self, instance, id_map): + if self.is_backref: + for field in self.fields: + identifier = instance.__data__[field.name] + key = (field, identifier) + if key in id_map: + setattr(instance, field.name, id_map[key]) + else: + for field, attname in self.field_to_name: + identifier = instance.__data__[field.rel_field.name] + key = (field, identifier) + rel_instances = id_map.get(key, []) + for inst in rel_instances: + setattr(inst, attname, instance) + inst._dirty.clear() + setattr(instance, field.backref, rel_instances) + + def store_instance(self, instance, id_map): + for field, attname in self.field_to_name: + identity = field.rel_field.python_value(instance.__data__[attname]) + key = (field, identity) + if self.is_backref: + id_map[key] = instance + else: + id_map.setdefault(key, []) + 
id_map[key].append(instance) + + +def prefetch_add_subquery(sq, subqueries): + fixed_queries = [PrefetchQuery(sq)] + for i, subquery in enumerate(subqueries): + if isinstance(subquery, tuple): + subquery, target_model = subquery + else: + target_model = None + if not isinstance(subquery, Query) and is_model(subquery) or \ + isinstance(subquery, ModelAlias): + subquery = subquery.select() + subquery_model = subquery.model + fks = backrefs = None + for j in reversed(range(i + 1)): + fixed = fixed_queries[j] + last_query = fixed.query + last_model = last_obj = fixed.model + if isinstance(last_model, ModelAlias): + last_model = last_model.model + rels = subquery_model._meta.model_refs.get(last_model, []) + if rels: + fks = [getattr(subquery_model, fk.name) for fk in rels] + pks = [getattr(last_obj, fk.rel_field.name) for fk in rels] + else: + backrefs = subquery_model._meta.model_backrefs.get(last_model) + if (fks or backrefs) and ((target_model is last_obj) or + (target_model is None)): + break + + if not fks and not backrefs: + tgt_err = ' using %s' % target_model if target_model else '' + raise AttributeError('Error: unable to find foreign key for ' + 'query: %s%s' % (subquery, tgt_err)) + + dest = (target_model,) if target_model else None + + if fks: + expr = reduce(operator.or_, [ + (fk << last_query.select(pk)) + for (fk, pk) in zip(fks, pks)]) + subquery = subquery.where(expr) + fixed_queries.append(PrefetchQuery(subquery, fks, False, dest)) + elif backrefs: + expressions = [] + for backref in backrefs: + rel_field = getattr(subquery_model, backref.rel_field.name) + fk_field = getattr(last_obj, backref.name) + expressions.append(rel_field << last_query.select(fk_field)) + subquery = subquery.where(reduce(operator.or_, expressions)) + fixed_queries.append(PrefetchQuery(subquery, backrefs, True, dest)) + + return fixed_queries + + +def prefetch(sq, *subqueries): + if not subqueries: + return sq + + fixed_queries = prefetch_add_subquery(sq, subqueries) + deps = {} 
+ rel_map = {} + for pq in reversed(fixed_queries): + query_model = pq.model + if pq.fields: + for rel_model in pq.rel_models: + rel_map.setdefault(rel_model, []) + rel_map[rel_model].append(pq) + + deps.setdefault(query_model, {}) + id_map = deps[query_model] + has_relations = bool(rel_map.get(query_model)) + + for instance in pq.query: + if pq.fields: + pq.store_instance(instance, id_map) + if has_relations: + for rel in rel_map[query_model]: + rel.populate_instance(instance, deps[rel.model]) + + return list(pq.query) diff --git a/python3.9libs/playhouse/__init__.py b/python3.9libs/playhouse/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python3.9libs/playhouse/apsw_ext.py b/python3.9libs/playhouse/apsw_ext.py new file mode 100644 index 0000000..bef93d5 --- /dev/null +++ b/python3.9libs/playhouse/apsw_ext.py @@ -0,0 +1,147 @@ +""" +Peewee integration with APSW, "another python sqlite wrapper". + +Project page: https://rogerbinns.github.io/apsw/ + +APSW is a really neat library that provides a thin wrapper on top of SQLite's +C interface. + +Here are just a few reasons to use APSW, taken from the documentation: + +* APSW gives all functionality of SQLite, including virtual tables, virtual + file system, blob i/o, backups and file control. +* Connections can be shared across threads without any additional locking. +* Transactions are managed explicitly by your code. +* APSW can handle nested transactions. +* Unicode is handled correctly. +* APSW is faster. 
+""" +import apsw +from peewee import * +from peewee import __exception_wrapper__ +from peewee import BooleanField as _BooleanField +from peewee import DateField as _DateField +from peewee import DateTimeField as _DateTimeField +from peewee import DecimalField as _DecimalField +from peewee import Insert +from peewee import TimeField as _TimeField +from peewee import logger + +from playhouse.sqlite_ext import SqliteExtDatabase + + +class APSWDatabase(SqliteExtDatabase): + server_version = tuple(int(i) for i in apsw.sqlitelibversion().split('.')) + + def __init__(self, database, **kwargs): + self._modules = {} + super(APSWDatabase, self).__init__(database, **kwargs) + + def register_module(self, mod_name, mod_inst): + self._modules[mod_name] = mod_inst + if not self.is_closed(): + self.connection().createmodule(mod_name, mod_inst) + + def unregister_module(self, mod_name): + del(self._modules[mod_name]) + + def _connect(self): + conn = apsw.Connection(self.database, **self.connect_params) + if self._timeout is not None: + conn.setbusytimeout(self._timeout * 1000) + try: + self._add_conn_hooks(conn) + except: + conn.close() + raise + return conn + + def _add_conn_hooks(self, conn): + super(APSWDatabase, self)._add_conn_hooks(conn) + self._load_modules(conn) # APSW-only. 
+ + def _load_modules(self, conn): + for mod_name, mod_inst in self._modules.items(): + conn.createmodule(mod_name, mod_inst) + return conn + + def _load_aggregates(self, conn): + for name, (klass, num_params) in self._aggregates.items(): + def make_aggregate(): + return (klass(), klass.step, klass.finalize) + conn.createaggregatefunction(name, make_aggregate) + + def _load_collations(self, conn): + for name, fn in self._collations.items(): + conn.createcollation(name, fn) + + def _load_functions(self, conn): + for name, (fn, num_params) in self._functions.items(): + conn.createscalarfunction(name, fn, num_params) + + def _load_extensions(self, conn): + conn.enableloadextension(True) + for extension in self._extensions: + conn.loadextension(extension) + + def load_extension(self, extension): + self._extensions.add(extension) + if not self.is_closed(): + conn = self.connection() + conn.enableloadextension(True) + conn.loadextension(extension) + + def last_insert_id(self, cursor, query_type=None): + return cursor.getconnection().last_insert_rowid() + + def rows_affected(self, cursor): + return cursor.getconnection().changes() + + def begin(self, lock_type='deferred'): + self.cursor().execute('begin %s;' % lock_type) + + def commit(self): + with __exception_wrapper__: + curs = self.cursor() + if curs.getconnection().getautocommit(): + return False + curs.execute('commit;') + return True + + def rollback(self): + with __exception_wrapper__: + curs = self.cursor() + if curs.getconnection().getautocommit(): + return False + curs.execute('rollback;') + return True + + def execute_sql(self, sql, params=None, commit=True): + logger.debug((sql, params)) + with __exception_wrapper__: + cursor = self.cursor() + cursor.execute(sql, params or ()) + return cursor + + +def nh(s, v): + if v is not None: + return str(v) + +class BooleanField(_BooleanField): + def db_value(self, v): + v = super(BooleanField, self).db_value(v) + if v is not None: + return v and 1 or 0 + +class 
DateField(_DateField): + db_value = nh + +class TimeField(_TimeField): + db_value = nh + +class DateTimeField(_DateTimeField): + db_value = nh + +class DecimalField(_DecimalField): + db_value = nh diff --git a/python3.9libs/playhouse/cockroachdb.py b/python3.9libs/playhouse/cockroachdb.py new file mode 100644 index 0000000..8c29715 --- /dev/null +++ b/python3.9libs/playhouse/cockroachdb.py @@ -0,0 +1,224 @@ +import functools +import re +import sys + +from peewee import * +from peewee import _atomic +from peewee import _manual +from peewee import ColumnMetadata # (name, data_type, null, primary_key, table, default) +from peewee import EnclosedNodeList +from peewee import Entity +from peewee import ForeignKeyMetadata # (column, dest_table, dest_column, table). +from peewee import IndexMetadata +from peewee import NodeList +from playhouse.pool import _PooledPostgresqlDatabase +try: + from playhouse.postgres_ext import ArrayField + from playhouse.postgres_ext import BinaryJSONField + from playhouse.postgres_ext import IntervalField + JSONField = BinaryJSONField +except ImportError: # psycopg2 not installed, ignore. + ArrayField = BinaryJSONField = IntervalField = JSONField = None + +if sys.version_info[0] > 2: + basestring = str + + +NESTED_TX_MIN_VERSION = 200100 + +TXN_ERR_MSG = ('CockroachDB does not support nested transactions. You may ' + 'alternatively use the @transaction context-manager/decorator, ' + 'which only wraps the outer-most block in transactional logic. ' + 'To run a transaction with automatic retries, use the ' + 'run_transaction() helper.') + +class ExceededMaxAttempts(OperationalError): pass + + +class UUIDKeyField(UUIDField): + auto_increment = True + + def __init__(self, *args, **kwargs): + if kwargs.get('constraints'): + raise ValueError('%s cannot specify constraints.' 
% type(self)) + kwargs['constraints'] = [SQL('DEFAULT gen_random_uuid()')] + kwargs.setdefault('primary_key', True) + super(UUIDKeyField, self).__init__(*args, **kwargs) + + +class RowIDField(AutoField): + field_type = 'INT' + + def __init__(self, *args, **kwargs): + if kwargs.get('constraints'): + raise ValueError('%s cannot specify constraints.' % type(self)) + kwargs['constraints'] = [SQL('DEFAULT unique_rowid()')] + super(RowIDField, self).__init__(*args, **kwargs) + + +class CockroachDatabase(PostgresqlDatabase): + field_types = PostgresqlDatabase.field_types.copy() + field_types.update({ + 'BLOB': 'BYTES', + }) + + for_update = False + nulls_ordering = False + release_after_rollback = True + + def __init__(self, database, *args, **kwargs): + # Unless a DSN or database connection-url were specified, provide + # convenient defaults for the user and port. + if 'dsn' not in kwargs and (database and + not database.startswith('postgresql://')): + kwargs.setdefault('user', 'root') + kwargs.setdefault('port', 26257) + super(CockroachDatabase, self).__init__(database, *args, **kwargs) + + def _set_server_version(self, conn): + curs = conn.cursor() + curs.execute('select version()') + raw, = curs.fetchone() + match_obj = re.match(r'^CockroachDB.+?v(\d+)\.(\d+)\.(\d+)', raw) + if match_obj is not None: + clean = '%d%02d%02d' % tuple(int(i) for i in match_obj.groups()) + self.server_version = int(clean) # 19.1.5 -> 190105. + else: + # Fallback to use whatever cockroachdb tells us via protocol. 
+ super(CockroachDatabase, self)._set_server_version(conn) + + def _get_pk_constraint(self, table, schema=None): + query = ('SELECT constraint_name ' + 'FROM information_schema.table_constraints ' + 'WHERE table_name = %s AND table_schema = %s ' + 'AND constraint_type = %s') + cursor = self.execute_sql(query, (table, schema or 'public', + 'PRIMARY KEY')) + row = cursor.fetchone() + return row and row[0] or None + + def get_indexes(self, table, schema=None): + # The primary-key index is returned by default, so we will just strip + # it out here. + indexes = super(CockroachDatabase, self).get_indexes(table, schema) + pkc = self._get_pk_constraint(table, schema) + return [idx for idx in indexes if (not pkc) or (idx.name != pkc)] + + def conflict_statement(self, on_conflict, query): + if not on_conflict._action: return + + action = on_conflict._action.lower() + if action in ('replace', 'upsert'): + return SQL('UPSERT') + elif action not in ('ignore', 'nothing', 'update'): + raise ValueError('Un-supported action for conflict resolution. ' + 'CockroachDB supports REPLACE (UPSERT), IGNORE ' + 'and UPDATE.') + + def conflict_update(self, oc, query): + action = oc._action.lower() if oc._action else '' + if action in ('ignore', 'nothing'): + parts = [SQL('ON CONFLICT')] + if oc._conflict_target: + parts.append(EnclosedNodeList([ + Entity(col) if isinstance(col, basestring) else col + for col in oc._conflict_target])) + parts.append(SQL('DO NOTHING')) + return NodeList(parts) + elif action in ('replace', 'upsert'): + # No special stuff is necessary, this is just indicated by starting + # the statement with UPSERT instead of INSERT. + return + elif oc._conflict_constraint: + raise ValueError('CockroachDB does not support the usage of a ' + 'constraint name. 
Use the column(s) instead.') + + return super(CockroachDatabase, self).conflict_update(oc, query) + + def extract_date(self, date_part, date_field): + return fn.extract(date_part, date_field) + + def from_timestamp(self, date_field): + # CRDB does not allow casting a decimal/float to timestamp, so we first + # cast to int, then to timestamptz. + return date_field.cast('int').cast('timestamptz') + + def begin(self, system_time=None, priority=None): + super(CockroachDatabase, self).begin() + if system_time is not None: + self.execute_sql('SET TRANSACTION AS OF SYSTEM TIME %s', + (system_time,), commit=False) + if priority is not None: + priority = priority.lower() + if priority not in ('low', 'normal', 'high'): + raise ValueError('priority must be low, normal or high') + self.execute_sql('SET TRANSACTION PRIORITY %s' % priority, + commit=False) + + def atomic(self, system_time=None, priority=None): + if self.server_version < NESTED_TX_MIN_VERSION: + return _crdb_atomic(self, system_time, priority) + return super(CockroachDatabase, self).atomic(system_time, priority) + + def savepoint(self): + if self.server_version < NESTED_TX_MIN_VERSION: + raise NotImplementedError(TXN_ERR_MSG) + return super(CockroachDatabase, self).savepoint() + + def retry_transaction(self, max_attempts=None, system_time=None, + priority=None): + def deco(cb): + @functools.wraps(cb) + def new_fn(): + return run_transaction(self, cb, max_attempts, system_time, + priority) + return new_fn + return deco + + def run_transaction(self, cb, max_attempts=None, system_time=None, + priority=None): + return run_transaction(self, cb, max_attempts, system_time, priority) + + +class _crdb_atomic(_atomic): + def __enter__(self): + if self.db.transaction_depth() > 0: + if not isinstance(self.db.top_transaction(), _manual): + raise NotImplementedError(TXN_ERR_MSG) + return super(_crdb_atomic, self).__enter__() + + +def run_transaction(db, callback, max_attempts=None, system_time=None, + priority=None): + """ + 
Run transactional SQL in a transaction with automatic retries. + + User-provided `callback`: + * Must accept one parameter, the `db` instance representing the connection + the transaction is running under. + * Must not attempt to commit, rollback or otherwise manage transactions. + * May be called more than once. + * Should ideally only contain SQL operations. + + Additionally, the database must not have any open transaction at the time + this function is called, as CRDB does not support nested transactions. + """ + max_attempts = max_attempts or -1 + with db.atomic(system_time=system_time, priority=priority) as txn: + db.execute_sql('SAVEPOINT cockroach_restart') + while max_attempts != 0: + try: + result = callback(db) + db.execute_sql('RELEASE SAVEPOINT cockroach_restart') + return result + except OperationalError as exc: + if exc.orig.pgcode == '40001': + max_attempts -= 1 + db.execute_sql('ROLLBACK TO SAVEPOINT cockroach_restart') + continue + raise + raise ExceededMaxAttempts(None, 'unable to commit transaction') + + +class PooledCockroachDatabase(_PooledPostgresqlDatabase, CockroachDatabase): + pass diff --git a/python3.9libs/playhouse/dataset.py b/python3.9libs/playhouse/dataset.py new file mode 100644 index 0000000..cdc9a4b --- /dev/null +++ b/python3.9libs/playhouse/dataset.py @@ -0,0 +1,454 @@ +import csv +import datetime +from decimal import Decimal +import json +import operator +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse +import sys +import uuid + +from peewee import * +from playhouse.db_url import connect +from playhouse.migrate import migrate +from playhouse.migrate import SchemaMigrator +from playhouse.reflection import Introspector + +if sys.version_info[0] == 3: + basestring = str + from functools import reduce + def open_file(f, mode, encoding='utf8'): + return open(f, mode, encoding=encoding) +else: + def open_file(f, mode, encoding='utf8'): + return open(f, mode) + + +class DataSet(object): + 
def __init__(self, url, **kwargs): + if isinstance(url, Database): + self._url = None + self._database = url + self._database_path = self._database.database + else: + self._url = url + parse_result = urlparse(url) + self._database_path = parse_result.path[1:] + + # Connect to the database. + self._database = connect(url) + + # Open a connection if one does not already exist. + self._database.connect(reuse_if_open=True) + + # Introspect the database and generate models. + self._introspector = Introspector.from_database(self._database) + self._models = self._introspector.generate_models( + skip_invalid=True, + literal_column_names=True, + **kwargs) + self._migrator = SchemaMigrator.from_database(self._database) + + class BaseModel(Model): + class Meta: + database = self._database + self._base_model = BaseModel + self._export_formats = self.get_export_formats() + self._import_formats = self.get_import_formats() + + def __repr__(self): + return '' % self._database_path + + def get_export_formats(self): + return { + 'csv': CSVExporter, + 'json': JSONExporter, + 'tsv': TSVExporter} + + def get_import_formats(self): + return { + 'csv': CSVImporter, + 'json': JSONImporter, + 'tsv': TSVImporter} + + def __getitem__(self, table): + if table not in self._models and table in self.tables: + self.update_cache(table) + return Table(self, table, self._models.get(table)) + + @property + def tables(self): + return self._database.get_tables() + + def __contains__(self, table): + return table in self.tables + + def connect(self, reuse_if_open=False): + self._database.connect(reuse_if_open=reuse_if_open) + + def close(self): + self._database.close() + + def update_cache(self, table=None): + if table: + dependencies = [table] + if table in self._models: + model_class = self._models[table] + dependencies.extend([ + related._meta.table_name for _, related, _ in + model_class._meta.model_graph()]) + else: + dependencies.extend(self.get_table_dependencies(table)) + else: + dependencies = 
None # Update all tables. + self._models = {} + updated = self._introspector.generate_models( + skip_invalid=True, + table_names=dependencies, + literal_column_names=True) + self._models.update(updated) + + def get_table_dependencies(self, table): + stack = [table] + accum = [] + seen = set() + while stack: + table = stack.pop() + for fk_meta in self._database.get_foreign_keys(table): + dest = fk_meta.dest_table + if dest not in seen: + stack.append(dest) + accum.append(dest) + return accum + + def __enter__(self): + self.connect() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if not self._database.is_closed(): + self.close() + + def query(self, sql, params=None, commit=True): + return self._database.execute_sql(sql, params, commit) + + def transaction(self): + if self._database.transaction_depth() == 0: + return self._database.transaction() + else: + return self._database.savepoint() + + def _check_arguments(self, filename, file_obj, format, format_dict): + if filename and file_obj: + raise ValueError('file is over-specified. Please use either ' + 'filename or file_obj, but not both.') + if not filename and not file_obj: + raise ValueError('A filename or file-like object must be ' + 'specified.') + if format not in format_dict: + valid_formats = ', '.join(sorted(format_dict.keys())) + raise ValueError('Unsupported format "%s". Use one of %s.' 
% (
+                format, valid_formats))
+
+    def freeze(self, query, format='csv', filename=None, file_obj=None,
+               encoding='utf8', **kwargs):
+        self._check_arguments(filename, file_obj, format, self._export_formats)
+        if filename:
+            file_obj = open_file(filename, 'w', encoding)
+
+        exporter = self._export_formats[format](query)
+        exporter.export(file_obj, **kwargs)
+
+        if filename:
+            file_obj.close()
+
+    def thaw(self, table, format='csv', filename=None, file_obj=None,
+             strict=False, encoding='utf8', **kwargs):
+        # BUG FIX: validate against the *import* formats, not the export
+        # formats.  The key sets happen to match today, so behavior is
+        # unchanged, but the wrong dict was being consulted.
+        self._check_arguments(filename, file_obj, format, self._import_formats)
+        if filename:
+            file_obj = open_file(filename, 'r', encoding)
+
+        importer = self._import_formats[format](self[table], strict)
+        count = importer.load(file_obj, **kwargs)
+
+        if filename:
+            file_obj.close()
+
+        return count
+
+
+class Table(object):
+    def __init__(self, dataset, name, model_class):
+        self.dataset = dataset
+        self.name = name
+        if model_class is None:
+            model_class = self._create_model()
+            model_class.create_table()
+            self.dataset._models[name] = model_class
+
+    @property
+    def model_class(self):
+        return self.dataset._models[self.name]
+
+    def __repr__(self):
+        # NOTE(review): the literal had been mangled to '' in this patch;
+        # restored to the upstream representation.
+        return '<Table: %s>' % self.name
+
+    def __len__(self):
+        return self.find().count()
+
+    def __iter__(self):
+        return iter(self.find().iterator())
+
+    def _create_model(self):
+        class Meta:
+            table_name = self.name
+        return type(
+            str(self.name),
+            (self.dataset._base_model,),
+            {'Meta': Meta})
+
+    def create_index(self, columns, unique=False):
+        index = ModelIndex(self.model_class, columns, unique=unique)
+        self.model_class.add_index(index)
+        self.dataset._database.execute(index)
+
+    def _guess_field_type(self, value):
+        if isinstance(value, basestring):
+            return TextField
+        if isinstance(value, (datetime.date, datetime.datetime)):
+            return DateTimeField
+        elif value is True or value is False:
+            return BooleanField
+        elif isinstance(value, int):
+            return IntegerField
+        elif isinstance(value, float):
+            return FloatField
+        elif 
isinstance(value, Decimal): + return DecimalField + return TextField + + @property + def columns(self): + return [f.name for f in self.model_class._meta.sorted_fields] + + def _migrate_new_columns(self, data): + new_keys = set(data) - set(self.model_class._meta.fields) + if new_keys: + operations = [] + for key in new_keys: + field_class = self._guess_field_type(data[key]) + field = field_class(null=True) + operations.append( + self.dataset._migrator.add_column(self.name, key, field)) + field.bind(self.model_class, key) + + migrate(*operations) + + self.dataset.update_cache(self.name) + + def __getitem__(self, item): + try: + return self.model_class[item] + except self.model_class.DoesNotExist: + pass + + def __setitem__(self, item, value): + if not isinstance(value, dict): + raise ValueError('Table.__setitem__() value must be a dict') + + pk = self.model_class._meta.primary_key + value[pk.name] = item + + try: + with self.dataset.transaction() as txn: + self.insert(**value) + except IntegrityError: + self.dataset.update_cache(self.name) + self.update(columns=[pk.name], **value) + + def __delitem__(self, item): + del self.model_class[item] + + def insert(self, **data): + self._migrate_new_columns(data) + return self.model_class.insert(**data).execute() + + def _apply_where(self, query, filters, conjunction=None): + conjunction = conjunction or operator.and_ + if filters: + expressions = [ + (self.model_class._meta.fields[column] == value) + for column, value in filters.items()] + query = query.where(reduce(conjunction, expressions)) + return query + + def update(self, columns=None, conjunction=None, **data): + self._migrate_new_columns(data) + filters = {} + if columns: + for column in columns: + filters[column] = data.pop(column) + + return self._apply_where( + self.model_class.update(**data), + filters, + conjunction).execute() + + def _query(self, **query): + return self._apply_where(self.model_class.select(), query) + + def find(self, **query): + return 
self._query(**query).dicts() + + def find_one(self, **query): + try: + return self.find(**query).get() + except self.model_class.DoesNotExist: + return None + + def all(self): + return self.find() + + def delete(self, **query): + return self._apply_where(self.model_class.delete(), query).execute() + + def freeze(self, *args, **kwargs): + return self.dataset.freeze(self.all(), *args, **kwargs) + + def thaw(self, *args, **kwargs): + return self.dataset.thaw(self.name, *args, **kwargs) + + +class Exporter(object): + def __init__(self, query): + self.query = query + + def export(self, file_obj): + raise NotImplementedError + + +class JSONExporter(Exporter): + def __init__(self, query, iso8601_datetimes=False): + super(JSONExporter, self).__init__(query) + self.iso8601_datetimes = iso8601_datetimes + + def _make_default(self): + datetime_types = (datetime.datetime, datetime.date, datetime.time) + + if self.iso8601_datetimes: + def default(o): + if isinstance(o, datetime_types): + return o.isoformat() + elif isinstance(o, (Decimal, uuid.UUID)): + return str(o) + raise TypeError('Unable to serialize %r as JSON' % o) + else: + def default(o): + if isinstance(o, datetime_types + (Decimal, uuid.UUID)): + return str(o) + raise TypeError('Unable to serialize %r as JSON' % o) + return default + + def export(self, file_obj, **kwargs): + json.dump( + list(self.query), + file_obj, + default=self._make_default(), + **kwargs) + + +class CSVExporter(Exporter): + def export(self, file_obj, header=True, **kwargs): + writer = csv.writer(file_obj, **kwargs) + tuples = self.query.tuples().execute() + tuples.initialize() + if header and getattr(tuples, 'columns', None): + writer.writerow([column for column in tuples.columns]) + for row in tuples: + writer.writerow(row) + + +class TSVExporter(CSVExporter): + def export(self, file_obj, header=True, **kwargs): + kwargs.setdefault('delimiter', '\t') + return super(TSVExporter, self).export(file_obj, header, **kwargs) + + +class 
Importer(object):
+    def __init__(self, table, strict=False):
+        self.table = table
+        self.strict = strict
+
+        model = self.table.model_class
+        # BUG FIX: copy the column mapping before augmenting it with the
+        # field names -- updating model._meta.columns in place would pollute
+        # the model's shared metadata.
+        self.columns = model._meta.columns.copy()
+        self.columns.update(model._meta.fields)
+
+    def load(self, file_obj):
+        raise NotImplementedError
+
+
+class JSONImporter(Importer):
+    def load(self, file_obj, **kwargs):
+        data = json.load(file_obj, **kwargs)
+        count = 0
+
+        for row in data:
+            if self.strict:
+                obj = {}
+                for key in row:
+                    field = self.columns.get(key)
+                    if field is not None:
+                        obj[field.name] = field.python_value(row[key])
+            else:
+                obj = row
+
+            if obj:
+                self.table.insert(**obj)
+                count += 1
+
+        return count
+
+
+class CSVImporter(Importer):
+    def load(self, file_obj, header=True, **kwargs):
+        count = 0
+        reader = csv.reader(file_obj, **kwargs)
+        if header:
+            try:
+                header_keys = next(reader)
+            except StopIteration:
+                return count
+
+            if self.strict:
+                header_fields = []
+                for idx, key in enumerate(header_keys):
+                    if key in self.columns:
+                        header_fields.append((idx, self.columns[key]))
+            else:
+                header_fields = list(enumerate(header_keys))
+        else:
+            # BUG FIX: Importer never defines self.model, so header-less
+            # loads raised AttributeError.  Use the table's model class.
+            model = self.table.model_class
+            header_fields = list(enumerate(model._meta.sorted_fields))
+            # NOTE(review): with header=False and strict=False the fields
+            # here are Field objects, so insert(**obj) below would fail on
+            # non-string keys -- confirm intended usage.
+
+        if not header_fields:
+            return count
+
+        for row in reader:
+            obj = {}
+            for idx, field in header_fields:
+                if self.strict:
+                    obj[field.name] = field.python_value(row[idx])
+                else:
+                    obj[field] = row[idx]
+
+            self.table.insert(**obj)
+            count += 1
+
+        return count
+
+
+class TSVImporter(CSVImporter):
+    def load(self, file_obj, header=True, **kwargs):
+        kwargs.setdefault('delimiter', '\t')
+        return super(TSVImporter, self).load(file_obj, header, **kwargs)
diff --git a/python3.9libs/playhouse/db_url.py b/python3.9libs/playhouse/db_url.py
new file mode 100644
index 0000000..7176c80
--- /dev/null
+++ b/python3.9libs/playhouse/db_url.py
@@ -0,0 +1,130 @@
+try:
+    from urlparse import parse_qsl, unquote, urlparse
+except ImportError:
+    from urllib.parse import parse_qsl, unquote, urlparse
+
+from peewee import *
+from playhouse.cockroachdb import CockroachDatabase
+from playhouse.cockroachdb import PooledCockroachDatabase
+from playhouse.pool import PooledMySQLDatabase
+from playhouse.pool import PooledPostgresqlDatabase
+from playhouse.pool import PooledSqliteDatabase
+from playhouse.pool import PooledSqliteExtDatabase
+from playhouse.sqlite_ext import SqliteExtDatabase
+
+
+# Maps URL schemes to the Database implementation that handles them.
+schemes = {
+    'cockroachdb': CockroachDatabase,
+    'cockroachdb+pool': PooledCockroachDatabase,
+    'crdb': CockroachDatabase,
+    'crdb+pool': PooledCockroachDatabase,
+    'mysql': MySQLDatabase,
+    'mysql+pool': PooledMySQLDatabase,
+    'postgres': PostgresqlDatabase,
+    'postgresql': PostgresqlDatabase,
+    'postgres+pool': PooledPostgresqlDatabase,
+    'postgresql+pool': PooledPostgresqlDatabase,
+    'sqlite': SqliteDatabase,
+    'sqliteext': SqliteExtDatabase,
+    'sqlite+pool': PooledSqliteDatabase,
+    'sqliteext+pool': PooledSqliteExtDatabase,
+}
+
+def register_database(db_class, *names):
+    # Expose additional schemes (e.g. 'apsw') to connect()/parse().
+    global schemes
+    for name in names:
+        schemes[name] = db_class
+
+def parseresult_to_dict(parsed, unquote_password=False):
+
+    # urlparse in python 2.6 is broken so query will be empty and instead
+    # appended to path complete with '?'
+    path_parts = parsed.path[1:].split('?')
+    try:
+        query = path_parts[1]
+    except IndexError:
+        query = parsed.query
+
+    connect_kwargs = {'database': path_parts[0]}
+    if parsed.username:
+        connect_kwargs['user'] = parsed.username
+    if parsed.password:
+        connect_kwargs['password'] = parsed.password
+        if unquote_password:
+            connect_kwargs['password'] = unquote(connect_kwargs['password'])
+    if parsed.hostname:
+        connect_kwargs['host'] = parsed.hostname
+    if parsed.port:
+        connect_kwargs['port'] = parsed.port
+
+    # Adjust parameters for MySQL.
+    if parsed.scheme == 'mysql' and 'password' in connect_kwargs:
+        connect_kwargs['passwd'] = connect_kwargs.pop('password')
+    elif 'sqlite' in parsed.scheme and not connect_kwargs['database']:
+        connect_kwargs['database'] = ':memory:'
+
+    # Get additional connection args from the query string
+    qs_args = parse_qsl(query, keep_blank_values=True)
+    for key, value in qs_args:
+        if value.lower() == 'false':
+            value = False
+        elif value.lower() == 'true':
+            value = True
+        elif value.isdigit():
+            value = int(value)
+        elif '.' in value and all(p.isdigit() for p in value.split('.', 1)):
+            try:
+                value = float(value)
+            except ValueError:
+                pass
+        elif value.lower() in ('null', 'none'):
+            value = None
+
+        connect_kwargs[key] = value
+
+    return connect_kwargs
+
+def parse(url, unquote_password=False):
+    parsed = urlparse(url)
+    return parseresult_to_dict(parsed, unquote_password)
+
+def connect(url, unquote_password=False, **connect_params):
+    parsed = urlparse(url)
+    connect_kwargs = parseresult_to_dict(parsed, unquote_password)
+    connect_kwargs.update(connect_params)
+    database_class = schemes.get(parsed.scheme)
+
+    if database_class is None:
+        # BUG FIX: the original tested `database_class in schemes`, which is
+        # always False here (database_class is None and None is never a key),
+        # making the "required library missing" branch unreachable.  Test the
+        # scheme key instead.
+        if parsed.scheme in schemes:
+            raise RuntimeError('Attempted to use "%s" but a required library '
+                               'could not be imported.' % parsed.scheme)
+        else:
+            raise RuntimeError('Unrecognized or unsupported scheme: "%s".' %
+                               parsed.scheme)
+
+    return database_class(**connect_kwargs)
+
+# Conditionally register additional databases.
+try: + from playhouse.pool import PooledPostgresqlExtDatabase +except ImportError: + pass +else: + register_database( + PooledPostgresqlExtDatabase, + 'postgresext+pool', + 'postgresqlext+pool') + +try: + from playhouse.apsw_ext import APSWDatabase +except ImportError: + pass +else: + register_database(APSWDatabase, 'apsw') + +try: + from playhouse.postgres_ext import PostgresqlExtDatabase +except ImportError: + pass +else: + register_database(PostgresqlExtDatabase, 'postgresext', 'postgresqlext') diff --git a/python3.9libs/playhouse/fields.py b/python3.9libs/playhouse/fields.py new file mode 100644 index 0000000..d024149 --- /dev/null +++ b/python3.9libs/playhouse/fields.py @@ -0,0 +1,60 @@ +try: + import bz2 +except ImportError: + bz2 = None +try: + import zlib +except ImportError: + zlib = None +try: + import cPickle as pickle +except ImportError: + import pickle + +from peewee import BlobField +from peewee import buffer_type + + +class CompressedField(BlobField): + ZLIB = 'zlib' + BZ2 = 'bz2' + algorithm_to_import = { + ZLIB: zlib, + BZ2: bz2, + } + + def __init__(self, compression_level=6, algorithm=ZLIB, *args, + **kwargs): + self.compression_level = compression_level + if algorithm not in self.algorithm_to_import: + raise ValueError('Unrecognized algorithm %s' % algorithm) + compress_module = self.algorithm_to_import[algorithm] + if compress_module is None: + raise ValueError('Missing library required for %s.' 
% algorithm) + + self.algorithm = algorithm + self.compress = compress_module.compress + self.decompress = compress_module.decompress + super(CompressedField, self).__init__(*args, **kwargs) + + def python_value(self, value): + if value is not None: + return self.decompress(value) + + def db_value(self, value): + if value is not None: + return self._constructor( + self.compress(value, self.compression_level)) + + +class PickleField(BlobField): + def python_value(self, value): + if value is not None: + if isinstance(value, buffer_type): + value = bytes(value) + return pickle.loads(value) + + def db_value(self, value): + if value is not None: + pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) + return self._constructor(pickled) diff --git a/python3.9libs/playhouse/flask_utils.py b/python3.9libs/playhouse/flask_utils.py new file mode 100644 index 0000000..76a2a62 --- /dev/null +++ b/python3.9libs/playhouse/flask_utils.py @@ -0,0 +1,185 @@ +import math +import sys + +from flask import abort +from flask import render_template +from flask import request +from peewee import Database +from peewee import DoesNotExist +from peewee import Model +from peewee import Proxy +from peewee import SelectQuery +from playhouse.db_url import connect as db_url_connect + + +class PaginatedQuery(object): + def __init__(self, query_or_model, paginate_by, page_var='page', page=None, + check_bounds=False): + self.paginate_by = paginate_by + self.page_var = page_var + self.page = page or None + self.check_bounds = check_bounds + + if isinstance(query_or_model, SelectQuery): + self.query = query_or_model + self.model = self.query.model + else: + self.model = query_or_model + self.query = self.model.select() + + def get_page(self): + if self.page is not None: + return self.page + + curr_page = request.args.get(self.page_var) + if curr_page and curr_page.isdigit(): + return max(1, int(curr_page)) + return 1 + + def get_page_count(self): + if not hasattr(self, '_page_count'): + 
self._page_count = int(math.ceil( + float(self.query.count()) / self.paginate_by)) + return self._page_count + + def get_object_list(self): + if self.check_bounds and self.get_page() > self.get_page_count(): + abort(404) + return self.query.paginate(self.get_page(), self.paginate_by) + + +def get_object_or_404(query_or_model, *query): + if not isinstance(query_or_model, SelectQuery): + query_or_model = query_or_model.select() + try: + return query_or_model.where(*query).get() + except DoesNotExist: + abort(404) + +def object_list(template_name, query, context_variable='object_list', + paginate_by=20, page_var='page', page=None, check_bounds=True, + **kwargs): + paginated_query = PaginatedQuery( + query, + paginate_by=paginate_by, + page_var=page_var, + page=page, + check_bounds=check_bounds) + kwargs[context_variable] = paginated_query.get_object_list() + return render_template( + template_name, + pagination=paginated_query, + page=paginated_query.get_page(), + **kwargs) + +def get_current_url(): + if not request.query_string: + return request.path + return '%s?%s' % (request.path, request.query_string) + +def get_next_url(default='/'): + if request.args.get('next'): + return request.args['next'] + elif request.form.get('next'): + return request.form['next'] + return default + +class FlaskDB(object): + def __init__(self, app=None, database=None, model_class=Model): + self.database = None # Reference to actual Peewee database instance. + self.base_model_class = model_class + self._app = app + self._db = database # dict, url, Database, or None (default). 
+ if app is not None: + self.init_app(app) + + def init_app(self, app): + self._app = app + + if self._db is None: + if 'DATABASE' in app.config: + initial_db = app.config['DATABASE'] + elif 'DATABASE_URL' in app.config: + initial_db = app.config['DATABASE_URL'] + else: + raise ValueError('Missing required configuration data for ' + 'database: DATABASE or DATABASE_URL.') + else: + initial_db = self._db + + self._load_database(app, initial_db) + self._register_handlers(app) + + def _load_database(self, app, config_value): + if isinstance(config_value, Database): + database = config_value + elif isinstance(config_value, dict): + database = self._load_from_config_dict(dict(config_value)) + else: + # Assume a database connection URL. + database = db_url_connect(config_value) + + if isinstance(self.database, Proxy): + self.database.initialize(database) + else: + self.database = database + + def _load_from_config_dict(self, config_dict): + try: + name = config_dict.pop('name') + engine = config_dict.pop('engine') + except KeyError: + raise RuntimeError('DATABASE configuration must specify a ' + '`name` and `engine`.') + + if '.' 
in engine: + path, class_name = engine.rsplit('.', 1) + else: + path, class_name = 'peewee', engine + + try: + __import__(path) + module = sys.modules[path] + database_class = getattr(module, class_name) + assert issubclass(database_class, Database) + except ImportError: + raise RuntimeError('Unable to import %s' % engine) + except AttributeError: + raise RuntimeError('Database engine not found %s' % engine) + except AssertionError: + raise RuntimeError('Database engine not a subclass of ' + 'peewee.Database: %s' % engine) + + return database_class(name, **config_dict) + + def _register_handlers(self, app): + app.before_request(self.connect_db) + app.teardown_request(self.close_db) + + def get_model_class(self): + if self.database is None: + raise RuntimeError('Database must be initialized.') + + class BaseModel(self.base_model_class): + class Meta: + database = self.database + + return BaseModel + + @property + def Model(self): + if self._app is None: + database = getattr(self, 'database', None) + if database is None: + self.database = Proxy() + + if not hasattr(self, '_model_class'): + self._model_class = self.get_model_class() + return self._model_class + + def connect_db(self): + self.database.connect() + + def close_db(self, exc): + if not self.database.is_closed(): + self.database.close() diff --git a/python3.9libs/playhouse/hybrid.py b/python3.9libs/playhouse/hybrid.py new file mode 100644 index 0000000..50531cc --- /dev/null +++ b/python3.9libs/playhouse/hybrid.py @@ -0,0 +1,53 @@ +from peewee import ModelDescriptor + + +# Hybrid methods/attributes, based on similar functionality in SQLAlchemy: +# http://docs.sqlalchemy.org/en/improve_toc/orm/extensions/hybrid.html +class hybrid_method(ModelDescriptor): + def __init__(self, func, expr=None): + self.func = func + self.expr = expr or func + + def __get__(self, instance, instance_type): + if instance is None: + return self.expr.__get__(instance_type, instance_type.__class__) + return 
self.func.__get__(instance, instance_type) + + def expression(self, expr): + self.expr = expr + return self + + +class hybrid_property(ModelDescriptor): + def __init__(self, fget, fset=None, fdel=None, expr=None): + self.fget = fget + self.fset = fset + self.fdel = fdel + self.expr = expr or fget + + def __get__(self, instance, instance_type): + if instance is None: + return self.expr(instance_type) + return self.fget(instance) + + def __set__(self, instance, value): + if self.fset is None: + raise AttributeError('Cannot set attribute.') + self.fset(instance, value) + + def __delete__(self, instance): + if self.fdel is None: + raise AttributeError('Cannot delete attribute.') + self.fdel(instance) + + def setter(self, fset): + self.fset = fset + return self + + def deleter(self, fdel): + self.fdel = fdel + return self + + def expression(self, expr): + self.expr = expr + return self diff --git a/python3.9libs/playhouse/kv.py b/python3.9libs/playhouse/kv.py new file mode 100644 index 0000000..742b49c --- /dev/null +++ b/python3.9libs/playhouse/kv.py @@ -0,0 +1,172 @@ +import operator + +from peewee import * +from peewee import Expression +from playhouse.fields import PickleField +try: + from playhouse.sqlite_ext import CSqliteExtDatabase as SqliteExtDatabase +except ImportError: + from playhouse.sqlite_ext import SqliteExtDatabase + + +Sentinel = type('Sentinel', (object,), {}) + + +class KeyValue(object): + """ + Persistent dictionary. + + :param Field key_field: field to use for key. Defaults to CharField. + :param Field value_field: field to use for value. Defaults to PickleField. + :param bool ordered: data should be returned in key-sorted order. + :param Database database: database where key/value data is stored. + :param str table_name: table name for data. 
+ """ + def __init__(self, key_field=None, value_field=None, ordered=False, + database=None, table_name='keyvalue'): + if key_field is None: + key_field = CharField(max_length=255, primary_key=True) + if not key_field.primary_key: + raise ValueError('key_field must have primary_key=True.') + + if value_field is None: + value_field = PickleField() + + self._key_field = key_field + self._value_field = value_field + self._ordered = ordered + self._database = database or SqliteExtDatabase(':memory:') + self._table_name = table_name + if isinstance(self._database, PostgresqlDatabase): + self.upsert = self._postgres_upsert + self.update = self._postgres_update + else: + self.upsert = self._upsert + self.update = self._update + + self.model = self.create_model() + self.key = self.model.key + self.value = self.model.value + + # Ensure table exists. + self.model.create_table() + + def create_model(self): + class KeyValue(Model): + key = self._key_field + value = self._value_field + class Meta: + database = self._database + table_name = self._table_name + return KeyValue + + def query(self, *select): + query = self.model.select(*select).tuples() + if self._ordered: + query = query.order_by(self.key) + return query + + def convert_expression(self, expr): + if not isinstance(expr, Expression): + return (self.key == expr), True + return expr, False + + def __contains__(self, key): + expr, _ = self.convert_expression(key) + return self.model.select().where(expr).exists() + + def __len__(self): + return len(self.model) + + def __getitem__(self, expr): + converted, is_single = self.convert_expression(expr) + query = self.query(self.value).where(converted) + item_getter = operator.itemgetter(0) + result = [item_getter(row) for row in query] + if len(result) == 0 and is_single: + raise KeyError(expr) + elif is_single: + return result[0] + return result + + def _upsert(self, key, value): + (self.model + .insert(key=key, value=value) + .on_conflict('replace') + .execute()) + + def 
_postgres_upsert(self, key, value): + (self.model + .insert(key=key, value=value) + .on_conflict(conflict_target=[self.key], + preserve=[self.value]) + .execute()) + + def __setitem__(self, expr, value): + if isinstance(expr, Expression): + self.model.update(value=value).where(expr).execute() + else: + self.upsert(expr, value) + + def __delitem__(self, expr): + converted, _ = self.convert_expression(expr) + self.model.delete().where(converted).execute() + + def __iter__(self): + return iter(self.query().execute()) + + def keys(self): + return map(operator.itemgetter(0), self.query(self.key)) + + def values(self): + return map(operator.itemgetter(0), self.query(self.value)) + + def items(self): + return iter(self.query().execute()) + + def _update(self, __data=None, **mapping): + if __data is not None: + mapping.update(__data) + return (self.model + .insert_many(list(mapping.items()), + fields=[self.key, self.value]) + .on_conflict('replace') + .execute()) + + def _postgres_update(self, __data=None, **mapping): + if __data is not None: + mapping.update(__data) + return (self.model + .insert_many(list(mapping.items()), + fields=[self.key, self.value]) + .on_conflict(conflict_target=[self.key], + preserve=[self.value]) + .execute()) + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key, default=None): + try: + return self[key] + except KeyError: + self[key] = default + return default + + def pop(self, key, default=Sentinel): + with self._database.atomic(): + try: + result = self[key] + except KeyError: + if default is Sentinel: + raise + return default + del self[key] + + return result + + def clear(self): + self.model.delete().execute() diff --git a/python3.9libs/playhouse/migrate.py b/python3.9libs/playhouse/migrate.py new file mode 100644 index 0000000..f536a45 --- /dev/null +++ b/python3.9libs/playhouse/migrate.py @@ -0,0 +1,886 @@ +""" +Lightweight schema migrations. 
+ +NOTE: Currently tested with SQLite and Postgresql. MySQL may be missing some +features. + +Example Usage +------------- + +Instantiate a migrator: + + # Postgres example: + my_db = PostgresqlDatabase(...) + migrator = PostgresqlMigrator(my_db) + + # SQLite example: + my_db = SqliteDatabase('my_database.db') + migrator = SqliteMigrator(my_db) + +Then you will use the `migrate` function to run various `Operation`s which +are generated by the migrator: + + migrate( + migrator.add_column('some_table', 'column_name', CharField(default='')) + ) + +Migrations are not run inside a transaction, so if you wish the migration to +run in a transaction you will need to wrap the call to `migrate` in a +transaction block, e.g.: + + with my_db.transaction(): + migrate(...) + +Supported Operations +-------------------- + +Add new field(s) to an existing model: + + # Create your field instances. For non-null fields you must specify a + # default value. + pubdate_field = DateTimeField(null=True) + comment_field = TextField(default='') + + # Run the migration, specifying the database table, field name and field. + migrate( + migrator.add_column('comment_tbl', 'pub_date', pubdate_field), + migrator.add_column('comment_tbl', 'comment', comment_field), + ) + +Renaming a field: + + # Specify the table, original name of the column, and its new name. + migrate( + migrator.rename_column('story', 'pub_date', 'publish_date'), + migrator.rename_column('story', 'mod_date', 'modified_date'), + ) + +Dropping a field: + + migrate( + migrator.drop_column('story', 'some_old_field'), + ) + +Making a field nullable or not nullable: + + # Note that when making a field not null that field must not have any + # NULL values present. + migrate( + # Make `pub_date` allow NULL values. + migrator.drop_not_null('story', 'pub_date'), + + # Prevent `modified_date` from containing NULL values. 
+ migrator.add_not_null('story', 'modified_date'), + ) + +Renaming a table: + + migrate( + migrator.rename_table('story', 'stories_tbl'), + ) + +Adding an index: + + # Specify the table, column names, and whether the index should be + # UNIQUE or not. + migrate( + # Create an index on the `pub_date` column. + migrator.add_index('story', ('pub_date',), False), + + # Create a multi-column index on the `pub_date` and `status` fields. + migrator.add_index('story', ('pub_date', 'status'), False), + + # Create a unique index on the category and title fields. + migrator.add_index('story', ('category_id', 'title'), True), + ) + +Dropping an index: + + # Specify the index name. + migrate(migrator.drop_index('story', 'story_pub_date_status')) + +Adding or dropping table constraints: + +.. code-block:: python + + # Add a CHECK() constraint to enforce the price cannot be negative. + migrate(migrator.add_constraint( + 'products', + 'price_check', + Check('price >= 0'))) + + # Remove the price check constraint. + migrate(migrator.drop_constraint('products', 'price_check')) + + # Add a UNIQUE constraint on the first and last names. 
+ migrate(migrator.add_unique('person', 'first_name', 'last_name')) +""" +from collections import namedtuple +import functools +import hashlib +import re + +from peewee import * +from peewee import CommaNodeList +from peewee import EnclosedNodeList +from peewee import Entity +from peewee import Expression +from peewee import Node +from peewee import NodeList +from peewee import OP +from peewee import callable_ +from peewee import sort_models +from peewee import _truncate_constraint_name +try: + from playhouse.cockroachdb import CockroachDatabase +except ImportError: + CockroachDatabase = None + + +class Operation(object): + """Encapsulate a single schema altering operation.""" + def __init__(self, migrator, method, *args, **kwargs): + self.migrator = migrator + self.method = method + self.args = args + self.kwargs = kwargs + + def execute(self, node): + self.migrator.database.execute(node) + + def _handle_result(self, result): + if isinstance(result, (Node, Context)): + self.execute(result) + elif isinstance(result, Operation): + result.run() + elif isinstance(result, (list, tuple)): + for item in result: + self._handle_result(item) + + def run(self): + kwargs = self.kwargs.copy() + kwargs['with_context'] = True + method = getattr(self.migrator, self.method) + self._handle_result(method(*self.args, **kwargs)) + + +def operation(fn): + @functools.wraps(fn) + def inner(self, *args, **kwargs): + with_context = kwargs.pop('with_context', False) + if with_context: + return fn(self, *args, **kwargs) + return Operation(self, fn.__name__, *args, **kwargs) + return inner + + +def make_index_name(table_name, columns): + index_name = '_'.join((table_name,) + tuple(columns)) + if len(index_name) > 64: + index_hash = hashlib.md5(index_name.encode('utf-8')).hexdigest() + index_name = '%s_%s' % (index_name[:56], index_hash[:7]) + return index_name + + +class SchemaMigrator(object): + explicit_create_foreign_key = False + explicit_delete_foreign_key = False + + def __init__(self, 
database): + self.database = database + + def make_context(self): + return self.database.get_sql_context() + + @classmethod + def from_database(cls, database): + if CockroachDatabase and isinstance(database, CockroachDatabase): + return CockroachDBMigrator(database) + elif isinstance(database, PostgresqlDatabase): + return PostgresqlMigrator(database) + elif isinstance(database, MySQLDatabase): + return MySQLMigrator(database) + elif isinstance(database, SqliteDatabase): + return SqliteMigrator(database) + raise ValueError('Unsupported database: %s' % database) + + @operation + def apply_default(self, table, column_name, field): + default = field.default + if callable_(default): + default = default() + + return (self.make_context() + .literal('UPDATE ') + .sql(Entity(table)) + .literal(' SET ') + .sql(Expression( + Entity(column_name), + OP.EQ, + field.db_value(default), + flat=True))) + + def _alter_table(self, ctx, table): + return ctx.literal('ALTER TABLE ').sql(Entity(table)) + + def _alter_column(self, ctx, table, column): + return (self + ._alter_table(ctx, table) + .literal(' ALTER COLUMN ') + .sql(Entity(column))) + + @operation + def alter_add_column(self, table, column_name, field): + # Make field null at first. + ctx = self.make_context() + field_null, field.null = field.null, True + + # Set the field's column-name and name, if it is not set or doesn't + # match the new value. 
+ if field.column_name != column_name: + field.name = field.column_name = column_name + + (self + ._alter_table(ctx, table) + .literal(' ADD COLUMN ') + .sql(field.ddl(ctx))) + + field.null = field_null + if isinstance(field, ForeignKeyField): + self.add_inline_fk_sql(ctx, field) + return ctx + + @operation + def add_constraint(self, table, name, constraint): + return (self + ._alter_table(self.make_context(), table) + .literal(' ADD CONSTRAINT ') + .sql(Entity(name)) + .literal(' ') + .sql(constraint)) + + @operation + def add_unique(self, table, *column_names): + constraint_name = 'uniq_%s' % '_'.join(column_names) + constraint = NodeList(( + SQL('UNIQUE'), + EnclosedNodeList([Entity(column) for column in column_names]))) + return self.add_constraint(table, constraint_name, constraint) + + @operation + def drop_constraint(self, table, name): + return (self + ._alter_table(self.make_context(), table) + .literal(' DROP CONSTRAINT ') + .sql(Entity(name))) + + def add_inline_fk_sql(self, ctx, field): + ctx = (ctx + .literal(' REFERENCES ') + .sql(Entity(field.rel_model._meta.table_name)) + .literal(' ') + .sql(EnclosedNodeList((Entity(field.rel_field.column_name),)))) + if field.on_delete is not None: + ctx = ctx.literal(' ON DELETE %s' % field.on_delete) + if field.on_update is not None: + ctx = ctx.literal(' ON UPDATE %s' % field.on_update) + return ctx + + @operation + def add_foreign_key_constraint(self, table, column_name, rel, rel_column, + on_delete=None, on_update=None): + constraint = 'fk_%s_%s_refs_%s' % (table, column_name, rel) + ctx = (self + .make_context() + .literal('ALTER TABLE ') + .sql(Entity(table)) + .literal(' ADD CONSTRAINT ') + .sql(Entity(_truncate_constraint_name(constraint))) + .literal(' FOREIGN KEY ') + .sql(EnclosedNodeList((Entity(column_name),))) + .literal(' REFERENCES ') + .sql(Entity(rel)) + .literal(' (') + .sql(Entity(rel_column)) + .literal(')')) + if on_delete is not None: + ctx = ctx.literal(' ON DELETE %s' % on_delete) + if 
on_update is not None: + ctx = ctx.literal(' ON UPDATE %s' % on_update) + return ctx + + @operation + def add_column(self, table, column_name, field): + # Adding a column is complicated by the fact that if there are rows + # present and the field is non-null, then we need to first add the + # column as a nullable field, then set the value, then add a not null + # constraint. + if not field.null and field.default is None: + raise ValueError('%s is not null but has no default' % column_name) + + is_foreign_key = isinstance(field, ForeignKeyField) + if is_foreign_key and not field.rel_field: + raise ValueError('Foreign keys must specify a `field`.') + + operations = [self.alter_add_column(table, column_name, field)] + + # In the event the field is *not* nullable, update with the default + # value and set not null. + if not field.null: + operations.extend([ + self.apply_default(table, column_name, field), + self.add_not_null(table, column_name)]) + + if is_foreign_key and self.explicit_create_foreign_key: + operations.append( + self.add_foreign_key_constraint( + table, + column_name, + field.rel_model._meta.table_name, + field.rel_field.column_name, + field.on_delete, + field.on_update)) + + if field.index or field.unique: + using = getattr(field, 'index_type', None) + operations.append(self.add_index(table, (column_name,), + field.unique, using)) + + return operations + + @operation + def drop_foreign_key_constraint(self, table, column_name): + raise NotImplementedError + + @operation + def drop_column(self, table, column_name, cascade=True): + ctx = self.make_context() + (self._alter_table(ctx, table) + .literal(' DROP COLUMN ') + .sql(Entity(column_name))) + + if cascade: + ctx.literal(' CASCADE') + + fk_columns = [ + foreign_key.column + for foreign_key in self.database.get_foreign_keys(table)] + if column_name in fk_columns and self.explicit_delete_foreign_key: + return [self.drop_foreign_key_constraint(table, column_name), ctx] + + return ctx + + @operation + def 
rename_column(self, table, old_name, new_name): + return (self + ._alter_table(self.make_context(), table) + .literal(' RENAME COLUMN ') + .sql(Entity(old_name)) + .literal(' TO ') + .sql(Entity(new_name))) + + @operation + def add_not_null(self, table, column): + return (self + ._alter_column(self.make_context(), table, column) + .literal(' SET NOT NULL')) + + @operation + def drop_not_null(self, table, column): + return (self + ._alter_column(self.make_context(), table, column) + .literal(' DROP NOT NULL')) + + @operation + def alter_column_type(self, table, column, field, cast=None): + # ALTER TABLE
ALTER COLUMN + ctx = self.make_context() + ctx = (self + ._alter_column(ctx, table, column) + .literal(' TYPE ') + .sql(field.ddl_datatype(ctx))) + if cast is not None: + if not isinstance(cast, Node): + cast = SQL(cast) + ctx = ctx.literal(' USING ').sql(cast) + return ctx + + @operation + def rename_table(self, old_name, new_name): + return (self + ._alter_table(self.make_context(), old_name) + .literal(' RENAME TO ') + .sql(Entity(new_name))) + + @operation + def add_index(self, table, columns, unique=False, using=None): + ctx = self.make_context() + index_name = make_index_name(table, columns) + table_obj = Table(table) + cols = [getattr(table_obj.c, column) for column in columns] + index = Index(index_name, table_obj, cols, unique=unique, using=using) + return ctx.sql(index) + + @operation + def drop_index(self, table, index_name): + return (self + .make_context() + .literal('DROP INDEX ') + .sql(Entity(index_name))) + + +class PostgresqlMigrator(SchemaMigrator): + def _primary_key_columns(self, tbl): + query = """ + SELECT pg_attribute.attname + FROM pg_index, pg_class, pg_attribute + WHERE + pg_class.oid = '%s'::regclass AND + indrelid = pg_class.oid AND + pg_attribute.attrelid = pg_class.oid AND + pg_attribute.attnum = any(pg_index.indkey) AND + indisprimary; + """ + cursor = self.database.execute_sql(query % tbl) + return [row[0] for row in cursor.fetchall()] + + @operation + def set_search_path(self, schema_name): + return (self + .make_context() + .literal('SET search_path TO %s' % schema_name)) + + @operation + def rename_table(self, old_name, new_name): + pk_names = self._primary_key_columns(old_name) + ParentClass = super(PostgresqlMigrator, self) + + operations = [ + ParentClass.rename_table(old_name, new_name, with_context=True)] + + if len(pk_names) == 1: + # Check for existence of primary key sequence. 
+ seq_name = '%s_%s_seq' % (old_name, pk_names[0]) + query = """ + SELECT 1 + FROM information_schema.sequences + WHERE LOWER(sequence_name) = LOWER(%s) + """ + cursor = self.database.execute_sql(query, (seq_name,)) + if bool(cursor.fetchone()): + new_seq_name = '%s_%s_seq' % (new_name, pk_names[0]) + operations.append(ParentClass.rename_table( + seq_name, new_seq_name)) + + return operations + + +class CockroachDBMigrator(PostgresqlMigrator): + explicit_create_foreign_key = True + + def add_inline_fk_sql(self, ctx, field): + pass + + @operation + def drop_index(self, table, index_name): + return (self + .make_context() + .literal('DROP INDEX ') + .sql(Entity(index_name)) + .literal(' CASCADE')) + + +class MySQLColumn(namedtuple('_Column', ('name', 'definition', 'null', 'pk', + 'default', 'extra'))): + @property + def is_pk(self): + return self.pk == 'PRI' + + @property + def is_unique(self): + return self.pk == 'UNI' + + @property + def is_null(self): + return self.null == 'YES' + + def sql(self, column_name=None, is_null=None): + if is_null is None: + is_null = self.is_null + if column_name is None: + column_name = self.name + parts = [ + Entity(column_name), + SQL(self.definition)] + if self.is_unique: + parts.append(SQL('UNIQUE')) + if is_null: + parts.append(SQL('NULL')) + else: + parts.append(SQL('NOT NULL')) + if self.is_pk: + parts.append(SQL('PRIMARY KEY')) + if self.extra: + parts.append(SQL(self.extra)) + return NodeList(parts) + + +class MySQLMigrator(SchemaMigrator): + explicit_create_foreign_key = True + explicit_delete_foreign_key = True + + def _alter_column(self, ctx, table, column): + return (self + ._alter_table(ctx, table) + .literal(' MODIFY ') + .sql(Entity(column))) + + @operation + def rename_table(self, old_name, new_name): + return (self + .make_context() + .literal('RENAME TABLE ') + .sql(Entity(old_name)) + .literal(' TO ') + .sql(Entity(new_name))) + + def _get_column_definition(self, table, column_name): + cursor = 
self.database.execute_sql('DESCRIBE `%s`;' % table) + rows = cursor.fetchall() + for row in rows: + column = MySQLColumn(*row) + if column.name == column_name: + return column + return False + + def get_foreign_key_constraint(self, table, column_name): + cursor = self.database.execute_sql( + ('SELECT constraint_name ' + 'FROM information_schema.key_column_usage WHERE ' + 'table_schema = DATABASE() AND ' + 'table_name = %s AND ' + 'column_name = %s AND ' + 'referenced_table_name IS NOT NULL AND ' + 'referenced_column_name IS NOT NULL;'), + (table, column_name)) + result = cursor.fetchone() + if not result: + raise AttributeError( + 'Unable to find foreign key constraint for ' + '"%s" on table "%s".' % (table, column_name)) + return result[0] + + @operation + def drop_foreign_key_constraint(self, table, column_name): + fk_constraint = self.get_foreign_key_constraint(table, column_name) + return (self + ._alter_table(self.make_context(), table) + .literal(' DROP FOREIGN KEY ') + .sql(Entity(fk_constraint))) + + def add_inline_fk_sql(self, ctx, field): + pass + + @operation + def add_not_null(self, table, column): + column_def = self._get_column_definition(table, column) + add_not_null = (self + ._alter_table(self.make_context(), table) + .literal(' MODIFY ') + .sql(column_def.sql(is_null=False))) + + fk_objects = dict( + (fk.column, fk) + for fk in self.database.get_foreign_keys(table)) + if column not in fk_objects: + return add_not_null + + fk_metadata = fk_objects[column] + return (self.drop_foreign_key_constraint(table, column), + add_not_null, + self.add_foreign_key_constraint( + table, + column, + fk_metadata.dest_table, + fk_metadata.dest_column)) + + @operation + def drop_not_null(self, table, column): + column = self._get_column_definition(table, column) + if column.is_pk: + raise ValueError('Primary keys can not be null') + return (self + ._alter_table(self.make_context(), table) + .literal(' MODIFY ') + .sql(column.sql(is_null=True))) + + @operation + def 
rename_column(self, table, old_name, new_name): + fk_objects = dict( + (fk.column, fk) + for fk in self.database.get_foreign_keys(table)) + is_foreign_key = old_name in fk_objects + + column = self._get_column_definition(table, old_name) + rename_ctx = (self + ._alter_table(self.make_context(), table) + .literal(' CHANGE ') + .sql(Entity(old_name)) + .literal(' ') + .sql(column.sql(column_name=new_name))) + if is_foreign_key: + fk_metadata = fk_objects[old_name] + return [ + self.drop_foreign_key_constraint(table, old_name), + rename_ctx, + self.add_foreign_key_constraint( + table, + new_name, + fk_metadata.dest_table, + fk_metadata.dest_column), + ] + else: + return rename_ctx + + @operation + def alter_column_type(self, table, column, field, cast=None): + if cast is not None: + raise ValueError('alter_column_type() does not support cast with ' + 'MySQL.') + ctx = self.make_context() + return (self + ._alter_table(ctx, table) + .literal(' MODIFY ') + .sql(Entity(column)) + .literal(' ') + .sql(field.ddl(ctx))) + + @operation + def drop_index(self, table, index_name): + return (self + .make_context() + .literal('DROP INDEX ') + .sql(Entity(index_name)) + .literal(' ON ') + .sql(Entity(table))) + + +class SqliteMigrator(SchemaMigrator): + """ + SQLite supports a subset of ALTER TABLE queries, view the docs for the + full details http://sqlite.org/lang_altertable.html + """ + column_re = re.compile('(.+?)\((.+)\)') + column_split_re = re.compile(r'(?:[^,(]|\([^)]*\))+') + column_name_re = re.compile(r'''["`']?([\w]+)''') + fk_re = re.compile(r'FOREIGN KEY\s+\("?([\w]+)"?\)\s+', re.I) + + def _get_column_names(self, table): + res = self.database.execute_sql('select * from "%s" limit 1' % table) + return [item[0] for item in res.description] + + def _get_create_table(self, table): + res = self.database.execute_sql( + ('select name, sql from sqlite_master ' + 'where type=? 
and LOWER(name)=?'), + ['table', table.lower()]) + return res.fetchone() + + @operation + def _update_column(self, table, column_to_update, fn): + columns = set(column.name.lower() + for column in self.database.get_columns(table)) + if column_to_update.lower() not in columns: + raise ValueError('Column "%s" does not exist on "%s"' % + (column_to_update, table)) + + # Get the SQL used to create the given table. + table, create_table = self._get_create_table(table) + + # Get the indexes and SQL to re-create indexes. + indexes = self.database.get_indexes(table) + + # Find any foreign keys we may need to remove. + self.database.get_foreign_keys(table) + + # Make sure the create_table does not contain any newlines or tabs, + # allowing the regex to work correctly. + create_table = re.sub(r'\s+', ' ', create_table) + + # Parse out the `CREATE TABLE` and column list portions of the query. + raw_create, raw_columns = self.column_re.search(create_table).groups() + + # Clean up the individual column definitions. + split_columns = self.column_split_re.findall(raw_columns) + column_defs = [col.strip() for col in split_columns] + + new_column_defs = [] + new_column_names = [] + original_column_names = [] + constraint_terms = ('foreign ', 'primary ', 'constraint ', 'check ') + + for column_def in column_defs: + column_name, = self.column_name_re.match(column_def).groups() + + if column_name == column_to_update: + new_column_def = fn(column_name, column_def) + if new_column_def: + new_column_defs.append(new_column_def) + original_column_names.append(column_name) + column_name, = self.column_name_re.match( + new_column_def).groups() + new_column_names.append(column_name) + else: + new_column_defs.append(column_def) + + # Avoid treating constraints as columns. + if not column_def.lower().startswith(constraint_terms): + new_column_names.append(column_name) + original_column_names.append(column_name) + + # Create a mapping of original columns to new columns. 
+ original_to_new = dict(zip(original_column_names, new_column_names)) + new_column = original_to_new.get(column_to_update) + + fk_filter_fn = lambda column_def: column_def + if not new_column: + # Remove any foreign keys associated with this column. + fk_filter_fn = lambda column_def: None + elif new_column != column_to_update: + # Update any foreign keys for this column. + fk_filter_fn = lambda column_def: self.fk_re.sub( + 'FOREIGN KEY ("%s") ' % new_column, + column_def) + + cleaned_columns = [] + for column_def in new_column_defs: + match = self.fk_re.match(column_def) + if match is not None and match.groups()[0] == column_to_update: + column_def = fk_filter_fn(column_def) + if column_def: + cleaned_columns.append(column_def) + + # Update the name of the new CREATE TABLE query. + temp_table = table + '__tmp__' + rgx = re.compile('("?)%s("?)' % table, re.I) + create = rgx.sub( + '\\1%s\\2' % temp_table, + raw_create) + + # Create the new table. + columns = ', '.join(cleaned_columns) + queries = [ + NodeList([SQL('DROP TABLE IF EXISTS'), Entity(temp_table)]), + SQL('%s (%s)' % (create.strip(), columns))] + + # Populate new table. + populate_table = NodeList(( + SQL('INSERT INTO'), + Entity(temp_table), + EnclosedNodeList([Entity(col) for col in new_column_names]), + SQL('SELECT'), + CommaNodeList([Entity(col) for col in original_column_names]), + SQL('FROM'), + Entity(table))) + drop_original = NodeList([SQL('DROP TABLE'), Entity(table)]) + + # Drop existing table and rename temp table. + queries += [ + populate_table, + drop_original, + self.rename_table(temp_table, table)] + + # Re-create user-defined indexes. User-defined indexes will have a + # non-empty SQL attribute. 
+ for index in filter(lambda idx: idx.sql, indexes): + if column_to_update not in index.columns: + queries.append(SQL(index.sql)) + elif new_column: + sql = self._fix_index(index.sql, column_to_update, new_column) + if sql is not None: + queries.append(SQL(sql)) + + return queries + + def _fix_index(self, sql, column_to_update, new_column): + # Split on the name of the column to update. If it splits into two + # pieces, then there's no ambiguity and we can simply replace the + # old with the new. + parts = sql.split(column_to_update) + if len(parts) == 2: + return sql.replace(column_to_update, new_column) + + # Find the list of columns in the index expression. + lhs, rhs = sql.rsplit('(', 1) + + # Apply the same "split in two" logic to the column list portion of + # the query. + if len(rhs.split(column_to_update)) == 2: + return '%s(%s' % (lhs, rhs.replace(column_to_update, new_column)) + + # Strip off the trailing parentheses and go through each column. + parts = rhs.rsplit(')', 1)[0].split(',') + columns = [part.strip('"`[]\' ') for part in parts] + + # `columns` looks something like: ['status', 'timestamp" DESC'] + # https://www.sqlite.org/lang_keywords.html + # Strip out any junk after the column name. 
+ clean = [] + for column in columns: + if re.match('%s(?:[\'"`\]]?\s|$)' % column_to_update, column): + column = new_column + column[len(column_to_update):] + clean.append(column) + + return '%s(%s)' % (lhs, ', '.join('"%s"' % c for c in clean)) + + @operation + def drop_column(self, table, column_name, cascade=True): + return self._update_column(table, column_name, lambda a, b: None) + + @operation + def rename_column(self, table, old_name, new_name): + def _rename(column_name, column_def): + return column_def.replace(column_name, new_name) + return self._update_column(table, old_name, _rename) + + @operation + def add_not_null(self, table, column): + def _add_not_null(column_name, column_def): + return column_def + ' NOT NULL' + return self._update_column(table, column, _add_not_null) + + @operation + def drop_not_null(self, table, column): + def _drop_not_null(column_name, column_def): + return column_def.replace('NOT NULL', '') + return self._update_column(table, column, _drop_not_null) + + @operation + def alter_column_type(self, table, column, field, cast=None): + if cast is not None: + raise ValueError('alter_column_type() does not support cast with ' + 'Sqlite.') + ctx = self.make_context() + def _alter_column_type(column_name, column_def): + node_list = field.ddl(ctx) + sql, _ = ctx.sql(Entity(column)).sql(node_list).query() + return sql + return self._update_column(table, column, _alter_column_type) + + @operation + def add_constraint(self, table, name, constraint): + raise NotImplementedError + + @operation + def drop_constraint(self, table, name): + raise NotImplementedError + + @operation + def add_foreign_key_constraint(self, table, column_name, field, + on_delete=None, on_update=None): + raise NotImplementedError + + +def migrate(*operations, **kwargs): + for operation in operations: + operation.run() diff --git a/python3.9libs/playhouse/mysql_ext.py b/python3.9libs/playhouse/mysql_ext.py new file mode 100644 index 0000000..4ab8f91 --- /dev/null +++ 
b/python3.9libs/playhouse/mysql_ext.py @@ -0,0 +1,90 @@ +import json + +try: + import mysql.connector as mysql_connector +except ImportError: + mysql_connector = None +try: + import mariadb +except ImportError: + mariadb = None + +from peewee import ImproperlyConfigured +from peewee import Insert +from peewee import MySQLDatabase +from peewee import NodeList +from peewee import SQL +from peewee import TextField +from peewee import fn + + +class MySQLConnectorDatabase(MySQLDatabase): + def _connect(self): + if mysql_connector is None: + raise ImproperlyConfigured('MySQL connector not installed!') + return mysql_connector.connect(db=self.database, **self.connect_params) + + def cursor(self, commit=None): + if self.is_closed(): + if self.autoconnect: + self.connect() + else: + raise InterfaceError('Error, database connection not opened.') + return self._state.conn.cursor(buffered=True) + + +class MariaDBConnectorDatabase(MySQLDatabase): + def _connect(self): + if mariadb is None: + raise ImproperlyConfigured('mariadb connector not installed!') + self.connect_params.pop('charset', None) + self.connect_params.pop('sql_mode', None) + self.connect_params.pop('use_unicode', None) + return mariadb.connect(db=self.database, **self.connect_params) + + def cursor(self, commit=None): + if self.is_closed(): + if self.autoconnect: + self.connect() + else: + raise InterfaceError('Error, database connection not opened.') + return self._state.conn.cursor(buffered=True) + + def _set_server_version(self, conn): + version = conn.server_version + version, point = divmod(version, 100) + version, minor = divmod(version, 100) + self.server_version = (version, minor, point) + if self.server_version >= (10, 5, 0): + self.returning_clause = True + + def last_insert_id(self, cursor, query_type=None): + if not self.returning_clause: + return cursor.lastrowid + elif query_type == Insert.SIMPLE: + try: + return cursor[0][0] + except (AttributeError, IndexError): + return cursor.lastrowid + return 
cursor + + +class JSONField(TextField): + field_type = 'JSON' + + def db_value(self, value): + if value is not None: + return json.dumps(value) + + def python_value(self, value): + if value is not None: + return json.loads(value) + + +def Match(columns, expr, modifier=None): + if isinstance(columns, (list, tuple)): + match = fn.MATCH(*columns) # Tuple of one or more columns / fields. + else: + match = fn.MATCH(columns) # Single column / field. + args = expr if modifier is None else NodeList((expr, SQL(modifier))) + return NodeList((match, fn.AGAINST(args))) diff --git a/python3.9libs/playhouse/pool.py b/python3.9libs/playhouse/pool.py new file mode 100644 index 0000000..2ee3b48 --- /dev/null +++ b/python3.9libs/playhouse/pool.py @@ -0,0 +1,318 @@ +""" +Lightweight connection pooling for peewee. + +In a multi-threaded application, up to `max_connections` will be opened. Each +thread (or, if using gevent, greenlet) will have it's own connection. + +In a single-threaded application, only one connection will be created. It will +be continually recycled until either it exceeds the stale timeout or is closed +explicitly (using `.manual_close()`). + +By default, all your application needs to do is ensure that connections are +closed when you are finished with them, and they will be returned to the pool. +For web applications, this typically means that at the beginning of a request, +you will open a connection, and when you return a response, you will close the +connection. + +Simple Postgres pool example code: + + # Use the special postgresql extensions. + from playhouse.pool import PooledPostgresqlExtDatabase + + db = PooledPostgresqlExtDatabase( + 'my_app', + max_connections=32, + stale_timeout=300, # 5 minutes. + user='postgres') + + class BaseModel(Model): + class Meta: + database = db + +That's it! 
+""" +import heapq +import logging +import random +import time +from collections import namedtuple +from itertools import chain + +try: + from psycopg2.extensions import TRANSACTION_STATUS_IDLE + from psycopg2.extensions import TRANSACTION_STATUS_INERROR + from psycopg2.extensions import TRANSACTION_STATUS_UNKNOWN +except ImportError: + TRANSACTION_STATUS_IDLE = \ + TRANSACTION_STATUS_INERROR = \ + TRANSACTION_STATUS_UNKNOWN = None + +from peewee import MySQLDatabase +from peewee import PostgresqlDatabase +from peewee import SqliteDatabase + +logger = logging.getLogger('peewee.pool') + + +def make_int(val): + if val is not None and not isinstance(val, (int, float)): + return int(val) + return val + + +class MaxConnectionsExceeded(ValueError): pass + + +PoolConnection = namedtuple('PoolConnection', ('timestamp', 'connection', + 'checked_out')) + + +class PooledDatabase(object): + def __init__(self, database, max_connections=20, stale_timeout=None, + timeout=None, **kwargs): + self._max_connections = make_int(max_connections) + self._stale_timeout = make_int(stale_timeout) + self._wait_timeout = make_int(timeout) + if self._wait_timeout == 0: + self._wait_timeout = float('inf') + + # Available / idle connections stored in a heap, sorted oldest first. + self._connections = [] + + # Mapping of connection id to PoolConnection. Ordinarily we would want + # to use something like a WeakKeyDictionary, but Python typically won't + # allow us to create weak references to connection objects. + self._in_use = {} + + # Use the memory address of the connection as the key in the event the + # connection object is not hashable. Connections will not get + # garbage-collected, however, because a reference to them will persist + # in "_in_use" as long as the conn has not been closed. 
+ self.conn_key = id + + super(PooledDatabase, self).__init__(database, **kwargs) + + def init(self, database, max_connections=None, stale_timeout=None, + timeout=None, **connect_kwargs): + super(PooledDatabase, self).init(database, **connect_kwargs) + if max_connections is not None: + self._max_connections = make_int(max_connections) + if stale_timeout is not None: + self._stale_timeout = make_int(stale_timeout) + if timeout is not None: + self._wait_timeout = make_int(timeout) + if self._wait_timeout == 0: + self._wait_timeout = float('inf') + + def connect(self, reuse_if_open=False): + if not self._wait_timeout: + return super(PooledDatabase, self).connect(reuse_if_open) + + expires = time.time() + self._wait_timeout + while expires > time.time(): + try: + ret = super(PooledDatabase, self).connect(reuse_if_open) + except MaxConnectionsExceeded: + time.sleep(0.1) + else: + return ret + raise MaxConnectionsExceeded('Max connections exceeded, timed out ' + 'attempting to connect.') + + def _connect(self): + while True: + try: + # Remove the oldest connection from the heap. + ts, conn = heapq.heappop(self._connections) + key = self.conn_key(conn) + except IndexError: + ts = conn = None + logger.debug('No connection available in pool.') + break + else: + if self._is_closed(conn): + # This connecton was closed, but since it was not stale + # it got added back to the queue of available conns. We + # then closed it and marked it as explicitly closed, so + # it's safe to throw it away now. + # (Because Database.close() calls Database._close()). + logger.debug('Connection %s was closed.', key) + ts = conn = None + elif self._stale_timeout and self._is_stale(ts): + # If we are attempting to check out a stale connection, + # then close it. We don't need to mark it in the "closed" + # set, because it is not in the list of available conns + # anymore. 
+ logger.debug('Connection %s was stale, closing.', key) + self._close(conn, True) + ts = conn = None + else: + break + + if conn is None: + if self._max_connections and ( + len(self._in_use) >= self._max_connections): + raise MaxConnectionsExceeded('Exceeded maximum connections.') + conn = super(PooledDatabase, self)._connect() + ts = time.time() - random.random() / 1000 + key = self.conn_key(conn) + logger.debug('Created new connection %s.', key) + + self._in_use[key] = PoolConnection(ts, conn, time.time()) + return conn + + def _is_stale(self, timestamp): + # Called on check-out and check-in to ensure the connection has + # not outlived the stale timeout. + return (time.time() - timestamp) > self._stale_timeout + + def _is_closed(self, conn): + return False + + def _can_reuse(self, conn): + # Called on check-in to make sure the connection can be re-used. + return True + + def _close(self, conn, close_conn=False): + key = self.conn_key(conn) + if close_conn: + super(PooledDatabase, self)._close(conn) + elif key in self._in_use: + pool_conn = self._in_use.pop(key) + if self._stale_timeout and self._is_stale(pool_conn.timestamp): + logger.debug('Closing stale connection %s.', key) + super(PooledDatabase, self)._close(conn) + elif self._can_reuse(conn): + logger.debug('Returning %s to pool.', key) + heapq.heappush(self._connections, (pool_conn.timestamp, conn)) + else: + logger.debug('Closed %s.', key) + + def manual_close(self): + """ + Close the underlying connection without returning it to the pool. + """ + if self.is_closed(): + return False + + # Obtain reference to the connection in-use by the calling thread. + conn = self.connection() + + # A connection will only be re-added to the available list if it is + # marked as "in use" at the time it is closed. We will explicitly + # remove it from the "in use" list, call "close()" for the + # side-effects, and then explicitly close the connection. 
+ self._in_use.pop(self.conn_key(conn), None) + self.close() + self._close(conn, close_conn=True) + + def close_idle(self): + # Close any open connections that are not currently in-use. + with self._lock: + for _, conn in self._connections: + self._close(conn, close_conn=True) + self._connections = [] + + def close_stale(self, age=600): + # Close any connections that are in-use but were checked out quite some + # time ago and can be considered stale. + with self._lock: + in_use = {} + cutoff = time.time() - age + n = 0 + for key, pool_conn in self._in_use.items(): + if pool_conn.checked_out < cutoff: + self._close(pool_conn.connection, close_conn=True) + n += 1 + else: + in_use[key] = pool_conn + self._in_use = in_use + return n + + def close_all(self): + # Close all connections -- available and in-use. Warning: may break any + # active connections used by other threads. + self.close() + with self._lock: + for _, conn in self._connections: + self._close(conn, close_conn=True) + for pool_conn in self._in_use.values(): + self._close(pool_conn.connection, close_conn=True) + self._connections = [] + self._in_use = {} + + +class PooledMySQLDatabase(PooledDatabase, MySQLDatabase): + def _is_closed(self, conn): + try: + conn.ping(False) + except: + return True + else: + return False + + +class _PooledPostgresqlDatabase(PooledDatabase): + def _is_closed(self, conn): + if conn.closed: + return True + + txn_status = conn.get_transaction_status() + if txn_status == TRANSACTION_STATUS_UNKNOWN: + return True + elif txn_status != TRANSACTION_STATUS_IDLE: + conn.rollback() + return False + + def _can_reuse(self, conn): + txn_status = conn.get_transaction_status() + # Do not return connection in an error state, as subsequent queries + # will all fail. If the status is unknown then we lost the connection + # to the server and the connection should not be re-used. 
+ if txn_status == TRANSACTION_STATUS_UNKNOWN: + return False + elif txn_status == TRANSACTION_STATUS_INERROR: + conn.reset() + elif txn_status != TRANSACTION_STATUS_IDLE: + conn.rollback() + return True + +class PooledPostgresqlDatabase(_PooledPostgresqlDatabase, PostgresqlDatabase): + pass + +try: + from playhouse.postgres_ext import PostgresqlExtDatabase + + class PooledPostgresqlExtDatabase(_PooledPostgresqlDatabase, PostgresqlExtDatabase): + pass +except ImportError: + PooledPostgresqlExtDatabase = None + + +class _PooledSqliteDatabase(PooledDatabase): + def _is_closed(self, conn): + try: + conn.total_changes + except: + return True + else: + return False + +class PooledSqliteDatabase(_PooledSqliteDatabase, SqliteDatabase): + pass + +try: + from playhouse.sqlite_ext import SqliteExtDatabase + + class PooledSqliteExtDatabase(_PooledSqliteDatabase, SqliteExtDatabase): + pass +except ImportError: + PooledSqliteExtDatabase = None + +try: + from playhouse.sqlite_ext import CSqliteExtDatabase + + class PooledCSqliteExtDatabase(_PooledSqliteDatabase, CSqliteExtDatabase): + pass +except ImportError: + PooledCSqliteExtDatabase = None diff --git a/python3.9libs/playhouse/postgres_ext.py b/python3.9libs/playhouse/postgres_ext.py new file mode 100644 index 0000000..a50510d --- /dev/null +++ b/python3.9libs/playhouse/postgres_ext.py @@ -0,0 +1,493 @@ +""" +Collection of postgres-specific extensions, currently including: + +* Support for hstore, a key/value type storage +""" +import json +import logging +import uuid + +from peewee import * +from peewee import ColumnBase +from peewee import Expression +from peewee import Node +from peewee import NodeList +from peewee import SENTINEL +from peewee import __exception_wrapper__ + +try: + from psycopg2cffi import compat + compat.register() +except ImportError: + pass + +try: + from psycopg2.extras import register_hstore +except ImportError: + def register_hstore(c, globally): + pass +try: + from psycopg2.extras import Json 
+except: + Json = None + + +logger = logging.getLogger('peewee') + + +HCONTAINS_DICT = '@>' +HCONTAINS_KEYS = '?&' +HCONTAINS_KEY = '?' +HCONTAINS_ANY_KEY = '?|' +HKEY = '->' +HUPDATE = '||' +ACONTAINS = '@>' +ACONTAINS_ANY = '&&' +TS_MATCH = '@@' +JSONB_CONTAINS = '@>' +JSONB_CONTAINED_BY = '<@' +JSONB_CONTAINS_KEY = '?' +JSONB_CONTAINS_ANY_KEY = '?|' +JSONB_CONTAINS_ALL_KEYS = '?&' +JSONB_EXISTS = '?' +JSONB_REMOVE = '-' + + +class _LookupNode(ColumnBase): + def __init__(self, node, parts): + self.node = node + self.parts = parts + super(_LookupNode, self).__init__() + + def clone(self): + return type(self)(self.node, list(self.parts)) + + def __hash__(self): + return hash((self.__class__.__name__, id(self))) + + +class _JsonLookupBase(_LookupNode): + def __init__(self, node, parts, as_json=False): + super(_JsonLookupBase, self).__init__(node, parts) + self._as_json = as_json + + def clone(self): + return type(self)(self.node, list(self.parts), self._as_json) + + @Node.copy + def as_json(self, as_json=True): + self._as_json = as_json + + def concat(self, rhs): + if not isinstance(rhs, Node): + rhs = Json(rhs) + return Expression(self.as_json(True), OP.CONCAT, rhs) + + def contains(self, other): + clone = self.as_json(True) + if isinstance(other, (list, dict)): + return Expression(clone, JSONB_CONTAINS, Json(other)) + return Expression(clone, JSONB_EXISTS, other) + + def contains_any(self, *keys): + return Expression( + self.as_json(True), + JSONB_CONTAINS_ANY_KEY, + Value(list(keys), unpack=False)) + + def contains_all(self, *keys): + return Expression( + self.as_json(True), + JSONB_CONTAINS_ALL_KEYS, + Value(list(keys), unpack=False)) + + def has_key(self, key): + return Expression(self.as_json(True), JSONB_CONTAINS_KEY, key) + + +class JsonLookup(_JsonLookupBase): + def __getitem__(self, value): + return JsonLookup(self.node, self.parts + [value], self._as_json) + + def __sql__(self, ctx): + ctx.sql(self.node) + for part in self.parts[:-1]: + 
ctx.literal('->').sql(part) + if self.parts: + (ctx + .literal('->' if self._as_json else '->>') + .sql(self.parts[-1])) + + return ctx + + +class JsonPath(_JsonLookupBase): + def __sql__(self, ctx): + return (ctx + .sql(self.node) + .literal('#>' if self._as_json else '#>>') + .sql(Value('{%s}' % ','.join(map(str, self.parts))))) + + +class ObjectSlice(_LookupNode): + @classmethod + def create(cls, node, value): + if isinstance(value, slice): + parts = [value.start or 0, value.stop or 0] + elif isinstance(value, int): + parts = [value] + elif isinstance(value, Node): + parts = value + else: + # Assumes colon-separated integer indexes. + parts = [int(i) for i in value.split(':')] + return cls(node, parts) + + def __sql__(self, ctx): + ctx.sql(self.node) + if isinstance(self.parts, Node): + ctx.literal('[').sql(self.parts).literal(']') + else: + ctx.literal('[%s]' % ':'.join(str(p + 1) for p in self.parts)) + return ctx + + def __getitem__(self, value): + return ObjectSlice.create(self, value) + + +class IndexedFieldMixin(object): + default_index_type = 'GIN' + + def __init__(self, *args, **kwargs): + kwargs.setdefault('index', True) # By default, use an index. 
+ super(IndexedFieldMixin, self).__init__(*args, **kwargs) + + +class ArrayField(IndexedFieldMixin, Field): + passthrough = True + + def __init__(self, field_class=IntegerField, field_kwargs=None, + dimensions=1, convert_values=False, *args, **kwargs): + self.__field = field_class(**(field_kwargs or {})) + self.dimensions = dimensions + self.convert_values = convert_values + self.field_type = self.__field.field_type + super(ArrayField, self).__init__(*args, **kwargs) + + def bind(self, model, name, set_attribute=True): + ret = super(ArrayField, self).bind(model, name, set_attribute) + self.__field.bind(model, '__array_%s' % name, False) + return ret + + def ddl_datatype(self, ctx): + data_type = self.__field.ddl_datatype(ctx) + return NodeList((data_type, SQL('[]' * self.dimensions)), glue='') + + def db_value(self, value): + if value is None or isinstance(value, Node): + return value + elif self.convert_values: + return self._process(self.__field.db_value, value, self.dimensions) + else: + return value if isinstance(value, list) else list(value) + + def python_value(self, value): + if self.convert_values and value is not None: + conv = self.__field.python_value + if isinstance(value, list): + return self._process(conv, value, self.dimensions) + else: + return conv(value) + else: + return value + + def _process(self, conv, value, dimensions): + dimensions -= 1 + if dimensions == 0: + return [conv(v) for v in value] + else: + return [self._process(conv, v, dimensions) for v in value] + + def __getitem__(self, value): + return ObjectSlice.create(self, value) + + def _e(op): + def inner(self, rhs): + return Expression(self, op, ArrayValue(self, rhs)) + return inner + __eq__ = _e(OP.EQ) + __ne__ = _e(OP.NE) + __gt__ = _e(OP.GT) + __ge__ = _e(OP.GTE) + __lt__ = _e(OP.LT) + __le__ = _e(OP.LTE) + __hash__ = Field.__hash__ + + def contains(self, *items): + return Expression(self, ACONTAINS, ArrayValue(self, items)) + + def contains_any(self, *items): + return 
Expression(self, ACONTAINS_ANY, ArrayValue(self, items)) + + +class ArrayValue(Node): + def __init__(self, field, value): + self.field = field + self.value = value + + def __sql__(self, ctx): + return (ctx + .sql(Value(self.value, unpack=False)) + .literal('::') + .sql(self.field.ddl_datatype(ctx))) + + +class DateTimeTZField(DateTimeField): + field_type = 'TIMESTAMPTZ' + + +class HStoreField(IndexedFieldMixin, Field): + field_type = 'HSTORE' + __hash__ = Field.__hash__ + + def __getitem__(self, key): + return Expression(self, HKEY, Value(key)) + + def keys(self): + return fn.akeys(self) + + def values(self): + return fn.avals(self) + + def items(self): + return fn.hstore_to_matrix(self) + + def slice(self, *args): + return fn.slice(self, Value(list(args), unpack=False)) + + def exists(self, key): + return fn.exist(self, key) + + def defined(self, key): + return fn.defined(self, key) + + def update(self, **data): + return Expression(self, HUPDATE, data) + + def delete(self, *keys): + return fn.delete(self, Value(list(keys), unpack=False)) + + def contains(self, value): + if isinstance(value, dict): + rhs = Value(value, unpack=False) + return Expression(self, HCONTAINS_DICT, rhs) + elif isinstance(value, (list, tuple)): + rhs = Value(value, unpack=False) + return Expression(self, HCONTAINS_KEYS, rhs) + return Expression(self, HCONTAINS_KEY, value) + + def contains_any(self, *keys): + return Expression(self, HCONTAINS_ANY_KEY, Value(list(keys), + unpack=False)) + + +class JSONField(Field): + field_type = 'JSON' + _json_datatype = 'json' + + def __init__(self, dumps=None, *args, **kwargs): + if Json is None: + raise Exception('Your version of psycopg2 does not support JSON.') + self.dumps = dumps or json.dumps + super(JSONField, self).__init__(*args, **kwargs) + + def db_value(self, value): + if value is None: + return value + if not isinstance(value, Json): + return Cast(self.dumps(value), self._json_datatype) + return value + + def __getitem__(self, value): + return 
JsonLookup(self, [value]) + + def path(self, *keys): + return JsonPath(self, keys) + + def concat(self, value): + if not isinstance(value, Node): + value = Json(value) + return super(JSONField, self).concat(value) + + +def cast_jsonb(node): + return NodeList((node, SQL('::jsonb')), glue='') + + +class BinaryJSONField(IndexedFieldMixin, JSONField): + field_type = 'JSONB' + _json_datatype = 'jsonb' + __hash__ = Field.__hash__ + + def contains(self, other): + if isinstance(other, (list, dict)): + return Expression(self, JSONB_CONTAINS, Json(other)) + elif isinstance(other, JSONField): + return Expression(self, JSONB_CONTAINS, other) + return Expression(cast_jsonb(self), JSONB_EXISTS, other) + + def contained_by(self, other): + return Expression(cast_jsonb(self), JSONB_CONTAINED_BY, Json(other)) + + def contains_any(self, *items): + return Expression( + cast_jsonb(self), + JSONB_CONTAINS_ANY_KEY, + Value(list(items), unpack=False)) + + def contains_all(self, *items): + return Expression( + cast_jsonb(self), + JSONB_CONTAINS_ALL_KEYS, + Value(list(items), unpack=False)) + + def has_key(self, key): + return Expression(cast_jsonb(self), JSONB_CONTAINS_KEY, key) + + def remove(self, *items): + return Expression( + cast_jsonb(self), + JSONB_REMOVE, + Value(list(items), unpack=False)) + + +class TSVectorField(IndexedFieldMixin, TextField): + field_type = 'TSVECTOR' + __hash__ = Field.__hash__ + + def match(self, query, language=None, plain=False): + params = (language, query) if language is not None else (query,) + func = fn.plainto_tsquery if plain else fn.to_tsquery + return Expression(self, TS_MATCH, func(*params)) + + +def Match(field, query, language=None): + params = (language, query) if language is not None else (query,) + field_params = (language, field) if language is not None else (field,) + return Expression( + fn.to_tsvector(*field_params), + TS_MATCH, + fn.to_tsquery(*params)) + + +class IntervalField(Field): + field_type = 'INTERVAL' + + +class 
FetchManyCursor(object): + __slots__ = ('cursor', 'array_size', 'exhausted', 'iterable') + + def __init__(self, cursor, array_size=None): + self.cursor = cursor + self.array_size = array_size or cursor.itersize + self.exhausted = False + self.iterable = self.row_gen() + + @property + def description(self): + return self.cursor.description + + def close(self): + self.cursor.close() + + def row_gen(self): + while True: + rows = self.cursor.fetchmany(self.array_size) + if not rows: + return + for row in rows: + yield row + + def fetchone(self): + if self.exhausted: + return + try: + return next(self.iterable) + except StopIteration: + self.exhausted = True + + +class ServerSideQuery(Node): + def __init__(self, query, array_size=None): + self.query = query + self.array_size = array_size + self._cursor_wrapper = None + + def __sql__(self, ctx): + return self.query.__sql__(ctx) + + def __iter__(self): + if self._cursor_wrapper is None: + self._execute(self.query._database) + return iter(self._cursor_wrapper.iterator()) + + def _execute(self, database): + if self._cursor_wrapper is None: + cursor = database.execute(self.query, named_cursor=True, + array_size=self.array_size) + self._cursor_wrapper = self.query._get_cursor_wrapper(cursor) + return self._cursor_wrapper + + +def ServerSide(query, database=None, array_size=None): + if database is None: + database = query._database + with database.transaction(): + server_side_query = ServerSideQuery(query, array_size=array_size) + for row in server_side_query: + yield row + + +class _empty_object(object): + __slots__ = () + def __nonzero__(self): + return False + __bool__ = __nonzero__ + +__named_cursor__ = _empty_object() + + +class PostgresqlExtDatabase(PostgresqlDatabase): + def __init__(self, *args, **kwargs): + self._register_hstore = kwargs.pop('register_hstore', False) + self._server_side_cursors = kwargs.pop('server_side_cursors', False) + super(PostgresqlExtDatabase, self).__init__(*args, **kwargs) + + def 
class PostgresqlExtDatabase(PostgresqlDatabase):
    """PostgresqlDatabase with optional hstore registration and
    transparent server-side cursors for SELECT queries."""

    def __init__(self, *args, **kwargs):
        self._register_hstore = kwargs.pop('register_hstore', False)
        self._server_side_cursors = kwargs.pop('server_side_cursors', False)
        super(PostgresqlExtDatabase, self).__init__(*args, **kwargs)

    def _connect(self):
        conn = super(PostgresqlExtDatabase, self)._connect()
        if self._register_hstore:
            register_hstore(conn, globally=True)
        return conn

    def cursor(self, commit=None):
        if self.is_closed():
            if not self.autoconnect:
                raise InterfaceError('Error, database connection not opened.')
            self.connect()
        if commit is __named_cursor__:
            # Named cursors need unique names; uuid1 is sufficient.
            return self._state.conn.cursor(name=str(uuid.uuid1()))
        return self._state.conn.cursor()

    def execute(self, query, commit=SENTINEL, named_cursor=False,
                array_size=None, **context_options):
        ctx = self.get_sql_context(**context_options)
        sql, params = ctx.sql(query).query()
        # Automatically use a named cursor for SELECTs when enabled.
        use_named = named_cursor or (self._server_side_cursors and
                                     sql[:6].lower() == 'select')
        if use_named:
            commit = __named_cursor__
        cursor = self.execute_sql(sql, params, commit=commit)
        if use_named:
            cursor = FetchManyCursor(cursor, array_size)
        return cursor


class Psycopg3Database(PostgresqlDatabase):
    """PostgresqlDatabase backed by the psycopg (v3) driver."""

    def _connect(self):
        if psycopg is None:
            raise ImproperlyConfigured('psycopg3 is not installed!')
        conn = psycopg.connect(dbname=self.database, **self.connect_params)
        if self._isolation_level is not None:
            conn.isolation_level = self._isolation_level
        return conn

    def get_binary_type(self):
        return psycopg.Binary

    def _set_server_version(self, conn):
        self.server_version = conn.pgconn.server_version
        if self.server_version >= 90600:
            self.safe_create_index = True

    def is_connection_usable(self):
        if self._state.closed:
            return False
        # Usable while idle, running a command, or in an active transaction;
        # an errored (or worse) transaction status means unusable.
        conn = self._state.conn
        return conn.pgconn.transaction_status < conn.TransactionStatus.INERROR
# Python keywords (both 2.x and 3.x) that may not be used as generated
# attribute names; colliding column names get a trailing underscore.
RESERVED_WORDS = {
    'and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del',
    'elif', 'else', 'except', 'exec', 'finally', 'for', 'from', 'global',
    'if', 'import', 'in', 'is', 'lambda', 'not', 'or', 'pass', 'print',
    'raise', 'return', 'try', 'while', 'with', 'yield',
}


class UnknownField(object):
    """Placeholder for a column whose database type could not be mapped."""
    pass
class Column(object):
    """
    Store metadata about a database column.
    """
    primary_key_types = (IntegerField, AutoField)

    def __init__(self, name, field_class, raw_column_type, nullable,
                 primary_key=False, column_name=None, index=False,
                 unique=False, default=None, extra_parameters=None):
        self.name = name
        self.field_class = field_class
        self.raw_column_type = raw_column_type
        self.nullable = nullable
        self.primary_key = primary_key
        self.column_name = column_name
        self.index = index
        self.unique = unique
        self.default = default
        self.extra_parameters = extra_parameters

        # Foreign-key metadata, populated later by set_foreign_key().
        self.rel_model = None
        self.related_name = None
        self.to_field = None

    def __repr__(self):
        shown = ('field_class', 'raw_column_type', 'nullable', 'primary_key',
                 'column_name')
        keyword_args = ', '.join('%s=%s' % (attr, getattr(self, attr))
                                 for attr in shown)
        return 'Column(%s, %s)' % (self.name, keyword_args)

    def get_field_parameters(self):
        """Build the keyword parameters for the generated field definition."""
        params = {}
        if self.extra_parameters is not None:
            params.update(self.extra_parameters)

        if self.nullable:
            params['null'] = True
        if self.field_class is ForeignKeyField or self.name != self.column_name:
            params['column_name'] = "'%s'" % self.column_name
        if self.primary_key and not issubclass(self.field_class, AutoField):
            params['primary_key'] = True
        if self.default is not None:
            params['constraints'] = '[SQL("DEFAULT %s")]' % self.default

        # ForeignKeyField-specific parameters.
        if self.is_foreign_key():
            params['model'] = self.rel_model
            if self.to_field:
                params['field'] = "'%s'" % self.to_field
            if self.related_name:
                params['backref'] = "'%s'" % self.related_name

        # Single-column indexes; primary keys are implicitly indexed.
        if not self.is_primary_key():
            if self.unique:
                params['unique'] = 'True'
            elif self.index and not self.is_foreign_key():
                params['index'] = 'True'

        return params

    def is_primary_key(self):
        return self.field_class is AutoField or self.primary_key

    def is_foreign_key(self):
        return self.field_class is ForeignKeyField

    def is_self_referential_fk(self):
        return (self.field_class is ForeignKeyField and
                self.rel_model == "'self'")

    def set_foreign_key(self, foreign_key, model_names, dest=None,
                        related_name=None):
        """Turn this column into a ForeignKeyField pointing at *dest*."""
        self.foreign_key = foreign_key
        self.field_class = ForeignKeyField
        if foreign_key.dest_table == foreign_key.table:
            self.rel_model = "'self'"
        else:
            self.rel_model = model_names[foreign_key.dest_table]
        self.to_field = dest and dest.name or None
        self.related_name = related_name or None

    def get_field(self):
        """Render this column as a field-definition source line."""
        rendered = {}
        for key, value in self.get_field_parameters().items():
            if isclass(value) and issubclass(value, Field):
                value = value.__name__
            rendered[key] = value

        param_str = ', '.join('%s=%s' % kv for kv in sorted(rendered.items()))
        field = '%s = %s(%s)' % (self.name,
                                 self.field_class.__name__,
                                 param_str)
        if self.field_class is UnknownField:
            # Keep the raw type as a trailing comment for the user.
            field = '%s # %s' % (field, self.raw_column_type)
        return field
class Metadata(object):
    """Database-specific introspection helper; subclasses supply the
    type-code -> Field mapping via get_column_types()."""
    column_map = {}
    extension_import = ''

    def __init__(self, database):
        self.database = database
        self.requires_extension = False

    def execute(self, sql, *params):
        return self.database.execute_sql(sql, params)

    def get_columns(self, table, schema=None):
        """Return an ordered mapping of column-name -> Column for *table*."""
        metadata = OrderedDict(
            (col.name, col)
            for col in self.database.get_columns(table, schema))

        column_types, extra_params = self.get_column_types(table, schema)

        # Promote a lone integer primary key to its auto-incrementing form.
        pk_names = self.get_primary_keys(table, schema)
        if len(pk_names) == 1:
            pk, = pk_names
            if column_types[pk] is IntegerField:
                column_types[pk] = AutoField
            elif column_types[pk] is BigIntegerField:
                column_types[pk] = BigAutoField

        columns = OrderedDict()
        for name, column_data in metadata.items():
            field_class = column_types[name]
            columns[name] = Column(
                name,
                field_class=field_class,
                raw_column_type=column_data.data_type,
                nullable=column_data.null,
                primary_key=column_data.primary_key,
                column_name=name,
                default=self._clean_default(field_class, column_data.default),
                extra_parameters=extra_params.get(name))
        return columns

    def get_column_types(self, table, schema=None):
        raise NotImplementedError

    def _clean_default(self, field_class, default):
        """Normalize a raw column default into source-code form (or None)."""
        if default is None or field_class in (AutoField, BigAutoField) or \
           default.lower() == 'null':
            return
        if issubclass(field_class, _StringField) and \
           isinstance(default, text_type) and not default.startswith("'"):
            default = "'%s'" % default
        return default or "''"

    def get_foreign_keys(self, table, schema=None):
        return self.database.get_foreign_keys(table, schema)

    def get_primary_keys(self, table, schema=None):
        return self.database.get_primary_keys(table, schema)

    def get_indexes(self, table, schema=None):
        return self.database.get_indexes(table, schema)
class PostgresqlMetadata(Metadata):
    # Postgres type OIDs -> peewee field classes.
    column_map = {
        16: BooleanField,
        17: BlobField,
        20: BigIntegerField,
        21: SmallIntegerField,
        23: IntegerField,
        25: TextField,
        700: FloatField,
        701: DoubleField,
        1042: CharField,  # blank-padded CHAR
        1043: CharField,
        1082: DateField,
        1114: DateTimeField,
        1184: DateTimeField,
        1083: TimeField,
        1266: TimeField,
        1700: DecimalField,
        2950: UUIDField,  # UUID
    }
    # Array-type OIDs -> element field classes.
    array_types = {
        1000: BooleanField,
        1001: BlobField,
        1005: SmallIntegerField,
        1007: IntegerField,
        1009: TextField,
        1014: CharField,
        1015: CharField,
        1016: BigIntegerField,
        1115: DateTimeField,
        1182: DateField,
        1183: TimeField,
        2951: UUIDField,
    }
    extension_import = 'from playhouse.postgres_ext import *'

    def __init__(self, database):
        super(PostgresqlMetadata, self).__init__(database)

        if postgres_ext is not None:
            # Discover OIDs for extension types (hstore, json, jsonb,
            # tsvector) so they map onto the playhouse field classes.
            cursor = self.execute('select oid, typname, format_type(oid, NULL)'
                                  ' from pg_type;')
            for oid, typname, formatted_type in cursor.fetchall():
                if typname == 'json':
                    self.column_map[oid] = postgres_ext.JSONField
                elif typname == 'jsonb':
                    self.column_map[oid] = postgres_ext.BinaryJSONField
                elif typname == 'hstore':
                    self.column_map[oid] = postgres_ext.HStoreField
                elif typname == 'tsvector':
                    self.column_map[oid] = postgres_ext.TSVectorField

            for oid in self.array_types:
                self.column_map[oid] = postgres_ext.ArrayField

    def get_column_types(self, table, schema):
        column_types = {}
        extra_params = {}
        if postgres_ext is not None:
            extension_types = set((
                postgres_ext.ArrayField,
                postgres_ext.BinaryJSONField,
                postgres_ext.JSONField,
                postgres_ext.TSVectorField,
                postgres_ext.HStoreField))
        else:
            extension_types = set()

        # Resolve the real type OID of every column of the table.
        identifier = '%s."%s"' % (schema, table)
        cursor = self.execute(
            'SELECT attname, atttypid FROM pg_catalog.pg_attribute '
            'WHERE attrelid = %s::regclass AND attnum > %s', identifier, 0)

        for name, oid in cursor.fetchall():
            column_types[name] = self.column_map.get(oid, UnknownField)
            if column_types[name] in extension_types:
                self.requires_extension = True
            if oid in self.array_types:
                extra_params[name] = {'field_class': self.array_types[oid]}

        return column_types, extra_params

    def get_columns(self, table, schema=None):
        return super(PostgresqlMetadata, self).get_columns(
            table, schema or 'public')

    def get_foreign_keys(self, table, schema=None):
        return super(PostgresqlMetadata, self).get_foreign_keys(
            table, schema or 'public')

    def get_primary_keys(self, table, schema=None):
        return super(PostgresqlMetadata, self).get_primary_keys(
            table, schema or 'public')

    def get_indexes(self, table, schema=None):
        return super(PostgresqlMetadata, self).get_indexes(
            table, schema or 'public')
class CockroachDBMetadata(PostgresqlMetadata):
    # CRDB treats INT the same as BIGINT, so map bigint type OIDs to the
    # regular IntegerField.
    column_map = PostgresqlMetadata.column_map.copy()
    column_map[20] = IntegerField
    array_types = PostgresqlMetadata.array_types.copy()
    array_types[1016] = IntegerField
    extension_import = 'from playhouse.cockroachdb import *'

    def __init__(self, database):
        # Deliberately skip PostgresqlMetadata.__init__ -- only JSONB and
        # array types are probed for CRDB.
        Metadata.__init__(self, database)
        self.requires_extension = True

        if postgres_ext is not None:
            cursor = self.execute('select oid, typname, format_type(oid, NULL)'
                                  ' from pg_type;')
            for oid, typname, formatted_type in cursor.fetchall():
                if typname == 'jsonb':
                    self.column_map[oid] = postgres_ext.BinaryJSONField

            for oid in self.array_types:
                self.column_map[oid] = postgres_ext.ArrayField


class MySQLMetadata(Metadata):
    # MySQL driver type codes -> peewee field classes (empty when no driver
    # is installed).
    if FIELD_TYPE is None:
        column_map = {}
    else:
        column_map = {
            FIELD_TYPE.BLOB: TextField,
            FIELD_TYPE.CHAR: CharField,
            FIELD_TYPE.DATE: DateField,
            FIELD_TYPE.DATETIME: DateTimeField,
            FIELD_TYPE.DECIMAL: DecimalField,
            FIELD_TYPE.DOUBLE: FloatField,
            FIELD_TYPE.FLOAT: FloatField,
            FIELD_TYPE.INT24: IntegerField,
            FIELD_TYPE.LONG_BLOB: TextField,
            FIELD_TYPE.LONG: IntegerField,
            FIELD_TYPE.LONGLONG: BigIntegerField,
            FIELD_TYPE.MEDIUM_BLOB: TextField,
            FIELD_TYPE.NEWDECIMAL: DecimalField,
            FIELD_TYPE.SHORT: IntegerField,
            FIELD_TYPE.STRING: CharField,
            FIELD_TYPE.TIMESTAMP: DateTimeField,
            FIELD_TYPE.TIME: TimeField,
            FIELD_TYPE.TINY_BLOB: TextField,
            FIELD_TYPE.TINY: IntegerField,
            FIELD_TYPE.VAR_STRING: CharField,
        }

    def __init__(self, database, **kwargs):
        if 'password' in kwargs:
            kwargs['passwd'] = kwargs.pop('password')
        # NOTE(review): Metadata.__init__ only accepts `database`; any extra
        # kwargs would raise TypeError here -- matches upstream, confirm
        # callers never pass extras.
        super(MySQLMetadata, self).__init__(database, **kwargs)

    def get_column_types(self, table, schema=None):
        column_types = {}

        # A LIMIT-1 select exposes the driver type code of every column.
        cursor = self.execute('SELECT * FROM `%s` LIMIT 1' % table)
        for column_description in cursor.description:
            name, type_code = column_description[:2]
            column_types[name] = self.column_map.get(type_code, UnknownField)

        return column_types, {}
class SqliteMetadata(Metadata):
    # Declared-type strings -> peewee field classes.
    column_map = {
        'bigint': BigIntegerField,
        'blob': BlobField,
        'bool': BooleanField,
        'boolean': BooleanField,
        'char': CharField,
        'date': DateField,
        'datetime': DateTimeField,
        'decimal': DecimalField,
        'float': FloatField,
        'integer': IntegerField,
        'integer unsigned': IntegerField,
        'int': IntegerField,
        'long': BigIntegerField,
        'numeric': DecimalField,
        'real': FloatField,
        'smallinteger': IntegerField,
        'smallint': IntegerField,
        'smallint unsigned': IntegerField,
        'text': TextField,
        'time': TimeField,
        'varchar': CharField,
    }

    begin = '(?:["\[\(]+)?'
    end = '(?:["\]\)]+)?'
    re_foreign_key = (
        '(?:FOREIGN KEY\s*)?'
        '{begin}(.+?){end}\s+(?:.+\s+)?'
        'references\s+{begin}(.+?){end}'
        '\s*\(["|\[]?(.+?)["|\]]?\)').format(begin=begin, end=end)
    re_varchar = r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$'

    def _map_col(self, column_type):
        """Map a declared SQLite column type onto a field class."""
        raw_column_type = column_type.lower()
        if raw_column_type in self.column_map:
            return self.column_map[raw_column_type]
        if re.search(self.re_varchar, raw_column_type):
            return CharField
        # Strip any "(...)" size suffix and retry; empty -> untyped column.
        stripped = re.sub('\(.+\)', '', raw_column_type)
        if stripped == '':
            return BareField
        return self.column_map.get(stripped, UnknownField)

    def get_column_types(self, table, schema=None):
        column_types = {}
        for column in self.database.get_columns(table):
            column_types[column.name] = self._map_col(column.data_type)
        return column_types, {}


_DatabaseMetadata = namedtuple('_DatabaseMetadata', (
    'columns',
    'primary_keys',
    'foreign_keys',
    'model_names',
    'indexes'))


class DatabaseMetadata(_DatabaseMetadata):
    """Introspection result with index-lookup conveniences."""

    def multi_column_indexes(self, table):
        """Return [(field_names, unique)] for indexes spanning >1 column."""
        accum = []
        for index in self.indexes[table]:
            if len(index.columns) > 1:
                field_names = [self.columns[table][column].name
                               for column in index.columns
                               if column in self.columns[table]]
                accum.append((field_names, index.unique))
        return accum

    def column_indexes(self, table):
        """Return {column: unique} for single-column indexes."""
        accum = {}
        for index in self.indexes[table]:
            if len(index.columns) == 1:
                accum[index.columns[0]] = index.unique
        return accum
class Introspector(object):
    """Introspect a database and generate peewee model classes / metadata.

    Use ``Introspector.from_database(db)`` to obtain an instance with the
    correct Metadata implementation for the database backend.
    """
    pk_classes = [AutoField, IntegerField]

    def __init__(self, metadata, schema=None):
        self.metadata = metadata
        self.schema = schema

    def __repr__(self):
        # FIX: the template string had been lost ("'' % ..."), producing a
        # TypeError; restore the intended representation.
        return '<Introspector: %s>' % self.metadata.database

    @classmethod
    def from_database(cls, database, schema=None):
        """Build an Introspector with backend-appropriate Metadata."""
        if CockroachDatabase and isinstance(database, CockroachDatabase):
            metadata = CockroachDBMetadata(database)
        elif isinstance(database, PostgresqlDatabase):
            metadata = PostgresqlMetadata(database)
        elif isinstance(database, MySQLDatabase):
            metadata = MySQLMetadata(database)
        elif isinstance(database, SqliteDatabase):
            metadata = SqliteMetadata(database)
        else:
            raise ValueError('Introspection not supported for %r' % database)
        return cls(metadata, schema=schema)

    def get_database_class(self):
        return type(self.metadata.database)

    def get_database_name(self):
        return self.metadata.database.database

    def get_database_kwargs(self):
        return self.metadata.database.connect_params

    def get_additional_imports(self):
        if self.metadata.requires_extension:
            return '\n' + self.metadata.extension_import
        return ''

    def make_model_name(self, table, snake_case=True):
        """Derive a PascalCase model class name from a table name."""
        if snake_case:
            table = make_snake_case(table)
        model = re.sub(r'[^\w]+', '', table)
        model_name = ''.join(sub.title() for sub in model.split('_'))
        if not model_name[0].isalpha():
            model_name = 'T' + model_name
        return model_name

    def make_column_name(self, column, is_foreign_key=False, snake_case=True):
        """Derive a valid, non-reserved attribute name from a column name."""
        column = column.strip()
        if snake_case:
            column = make_snake_case(column)
        column = column.lower()
        if is_foreign_key:
            # Strip "_id" from foreign keys, unless the foreign-key happens
            # to be named "_id", in which case the name is retained.
            column = re.sub('_id$', '', column) or column

        # Remove characters that are invalid for Python identifiers.
        column = re.sub(r'[^\w]+', '_', column)
        if column in RESERVED_WORDS:
            column += '_'
        if len(column) and column[0].isdigit():
            column = '_' + column
        return column

    def introspect(self, table_names=None, literal_column_names=False,
                   include_views=False, snake_case=True):
        """Introspect tables (and optionally views), returning a
        DatabaseMetadata with columns, keys, model names and indexes."""
        # Retrieve all the tables in the database.
        tables = self.metadata.database.get_tables(schema=self.schema)
        if include_views:
            views = self.metadata.database.get_views(schema=self.schema)
            tables.extend([view.name for view in views])

        if table_names is not None:
            tables = [table for table in tables if table in table_names]
        table_set = set(tables)

        columns = {}        # table -> {column name: Column}
        primary_keys = {}   # table -> [pk column names]
        foreign_keys = {}   # table -> [foreign keys]
        model_names = {}    # table -> model class name
        indexes = {}        # table -> [indexes]

        # Gather the columns for each table.
        for table in tables:
            table_indexes = self.metadata.get_indexes(table, self.schema)
            table_columns = self.metadata.get_columns(table, self.schema)
            try:
                foreign_keys[table] = self.metadata.get_foreign_keys(
                    table, self.schema)
            except ValueError as exc:
                # FIX: this called err(*exc.args), but no err() exists in
                # this module, so the intended "warn and continue" path
                # crashed with NameError instead. Report and fall back to
                # treating the table as having no foreign keys.
                import warnings
                warnings.warn('unable to introspect foreign keys for %s: %s'
                              % (table, exc))
                foreign_keys[table] = []
            else:
                # If there is a possibility we could exclude a dependent
                # table, ensure that we introspect it so FKs will work.
                if table_names is not None:
                    for foreign_key in foreign_keys[table]:
                        if foreign_key.dest_table not in table_set:
                            tables.append(foreign_key.dest_table)
                            table_set.add(foreign_key.dest_table)

            model_names[table] = self.make_model_name(table, snake_case)

            # Collect sets of all the column names as well as all the
            # foreign-key column names.
            lower_col_names = set(column_name.lower()
                                  for column_name in table_columns)
            fks = set(fk_col.column for fk_col in foreign_keys[table])

            for col_name, column in table_columns.items():
                if literal_column_names:
                    new_name = re.sub(r'[^\w]+', '_', col_name)
                else:
                    new_name = self.make_column_name(col_name,
                                                     col_name in fks,
                                                     snake_case)

                # If we have two columns, "parent" and "parent_id", ensure
                # we don't introduce a naming conflict.
                lower_name = col_name.lower()
                if lower_name.endswith('_id') and new_name in lower_col_names:
                    new_name = col_name.lower()

                column.name = new_name

            # Mark single-column indexes directly on the Column objects.
            for index in table_indexes:
                if len(index.columns) == 1:
                    column = index.columns[0]
                    if column in table_columns:
                        table_columns[column].unique = index.unique
                        table_columns[column].index = True

            primary_keys[table] = self.metadata.get_primary_keys(
                table, self.schema)
            columns[table] = table_columns
            indexes[table] = table_indexes

        # Gather all instances where we might have a `related_name` conflict,
        # either due to multiple FKs on a table pointing to the same table,
        # or a related_name that would conflict with an existing field.
        related_names = {}
        sort_fn = lambda foreign_key: foreign_key.column
        for table in tables:
            models_referenced = set()
            for foreign_key in sorted(foreign_keys[table], key=sort_fn):
                try:
                    column = columns[table][foreign_key.column]
                except KeyError:
                    continue

                dest_table = foreign_key.dest_table
                if dest_table in models_referenced:
                    related_names[column] = '%s_%s_set' % (
                        dest_table,
                        column.name)
                else:
                    models_referenced.add(dest_table)

        # On the second pass convert all foreign keys.
        for table in tables:
            for foreign_key in foreign_keys[table]:
                src = columns[foreign_key.table][foreign_key.column]
                try:
                    dest = columns[foreign_key.dest_table][
                        foreign_key.dest_column]
                except KeyError:
                    dest = None

                src.set_foreign_key(
                    foreign_key=foreign_key,
                    model_names=model_names,
                    dest=dest,
                    related_name=related_names.get(src))

        return DatabaseMetadata(
            columns,
            primary_keys,
            foreign_keys,
            model_names,
            indexes)

    def generate_models(self, skip_invalid=False, table_names=None,
                        literal_column_names=False, bare_fields=False,
                        include_views=False):
        """Introspect and return {table: generated peewee Model class}."""
        database = self.introspect(table_names, literal_column_names,
                                   include_views)
        models = {}

        class BaseModel(Model):
            class Meta:
                database = self.metadata.database
                schema = self.schema

        def _create_model(table, models):
            # Create FK target models first so FKs can reference them.
            for foreign_key in database.foreign_keys[table]:
                dest = foreign_key.dest_table
                if dest not in models and dest != table:
                    _create_model(dest, models)

            primary_keys = []
            columns = database.columns[table]
            for column_name, column in columns.items():
                if column.primary_key:
                    primary_keys.append(column.name)

            multi_column_indexes = database.multi_column_indexes(table)
            column_indexes = database.column_indexes(table)

            class Meta:
                indexes = multi_column_indexes
                table_name = table

            # Fix models with multi-column primary keys.
            composite_key = False
            if len(primary_keys) == 0:
                primary_keys = columns.keys()
            if len(primary_keys) > 1:
                Meta.primary_key = CompositeKey(*[
                    field.name for col, field in columns.items()
                    if col in primary_keys])
                composite_key = True

            attrs = {'Meta': Meta}
            for column_name, column in columns.items():
                FieldClass = column.field_class
                if FieldClass is not ForeignKeyField and bare_fields:
                    FieldClass = BareField
                elif FieldClass is UnknownField:
                    FieldClass = BareField

                params = {
                    'column_name': column_name,
                    'null': column.nullable}
                if column.primary_key and composite_key:
                    if FieldClass is AutoField:
                        FieldClass = IntegerField
                    params['primary_key'] = False
                elif column.primary_key and FieldClass is not AutoField:
                    params['primary_key'] = True
                if column.is_foreign_key():
                    if column.is_self_referential_fk():
                        params['model'] = 'self'
                    else:
                        dest_table = column.foreign_key.dest_table
                        params['model'] = models[dest_table]
                    if column.to_field:
                        params['field'] = column.to_field

                    # Generate a unique related name.
                    params['backref'] = '%s_%s_rel' % (table, column_name)

                if column.default is not None:
                    constraint = SQL('DEFAULT %s' % column.default)
                    params['constraints'] = [constraint]

                if column_name in column_indexes and not \
                   column.is_primary_key():
                    if column_indexes[column_name]:
                        params['unique'] = True
                    elif not column.is_foreign_key():
                        params['index'] = True

                attrs[column.name] = FieldClass(**params)

            try:
                models[table] = type(str(table), (BaseModel,), attrs)
            except ValueError:
                if not skip_invalid:
                    raise

        # Actually generate Model classes.
        for table, model in sorted(database.model_names.items()):
            if table not in models:
                _create_model(table, models)

        return models


def introspect(database, schema=None):
    """Convenience: introspect *database*, returning DatabaseMetadata."""
    introspector = Introspector.from_database(database, schema=schema)
    return introspector.introspect()


def generate_models(database, schema=None, **options):
    """Convenience: generate model classes for *database*."""
    introspector = Introspector.from_database(database, schema=schema)
    return introspector.generate_models(**options)


def print_model(model, indexes=True, inline_indexes=False):
    """Print a human-readable summary of *model*'s fields and indexes."""
    print(model._meta.name)
    for field in model._meta.sorted_fields:
        parts = [' %s %s' % (field.name, field.field_type)]
        if field.primary_key:
            parts.append(' PK')
        elif inline_indexes:
            if field.unique:
                parts.append(' UNIQUE')
            elif field.index:
                parts.append(' INDEX')
        if isinstance(field, ForeignKeyField):
            parts.append(' FK: %s.%s' % (field.rel_model.__name__,
                                         field.rel_field.name))
        print(''.join(parts))

    if indexes:
        index_list = model._meta.fields_to_index()
        if not index_list:
            return

        print('\nindex(es)')
        for index in index_list:
            parts = [' ']
            ctx = model._meta.database.get_sql_context()
            with ctx.scope_values(param='%s', quote='""'):
                ctx.sql(CommaNodeList(index._expressions))
                if index._where:
                    ctx.literal(' WHERE ')
                    ctx.sql(index._where)
                sql, params = ctx.query()

            clean = sql % tuple(map(_query_val_transform, params))
            parts.append(clean.replace('"', ''))

            if index._unique:
                parts.append(' UNIQUE')
            print(''.join(parts))
def get_table_sql(model):
    """Return the formatted CREATE TABLE statement for *model*."""
    sql, params = model._schema._create_table().query()
    if model._meta.database.param != '%s':
        sql = sql.replace(model._meta.database.param, '%s')

    # Format and indent the table declaration, simplest possible approach.
    match_obj = re.match('^(.+?\()(.+)(\).*)', sql)
    create, columns, extra = match_obj.groups()
    indented = ',\n'.join(' %s' % column for column in columns.split(', '))

    clean = '\n'.join((create, indented, extra)).strip()
    return clean % tuple(map(_query_val_transform, params))

def print_table_sql(model):
    print(get_table_sql(model))


def _clone_set(s):
    # Copy *s* into a fresh set; None or empty input yields an empty set.
    return set(s) if s else set()


def model_to_dict(model, recurse=True, backrefs=False, only=None,
                  exclude=None, seen=None, extra_attrs=None,
                  fields_from_query=None, max_depth=None, manytomany=False):
    """
    Convert a model instance (and any related objects) to a dictionary.

    :param bool recurse: Whether foreign-keys should be recursed.
    :param bool backrefs: Whether lists of related objects should be recursed.
    :param only: A list (or set) of field instances indicating which fields
        should be included.
    :param exclude: A list (or set) of field instances that should be
        excluded from the dictionary.
    :param list extra_attrs: Names of model instance attributes or methods
        that should be included.
    :param SelectQuery fields_from_query: Query that was source of model. Take
        fields explicitly selected by the query and serialize them.
    :param int max_depth: Maximum depth to recurse, value <= 0 means no max.
    :param bool manytomany: Process many-to-many fields.
    """
    max_depth = -1 if max_depth is None else max_depth
    if max_depth == 0:
        recurse = False

    only = _clone_set(only)
    extra_attrs = _clone_set(extra_attrs)

    def should_skip(name):
        # `exclude` is re-bound to a set below; lookups happen at call time.
        return (name in exclude) or (only and (name not in only))

    if fields_from_query is not None:
        for item in fields_from_query._returning:
            if isinstance(item, Field):
                only.add(item)
            elif isinstance(item, Alias):
                extra_attrs.add(item._alias)

    data = {}
    exclude = _clone_set(exclude)
    seen = _clone_set(seen)
    exclude |= seen
    model_class = type(model)

    if manytomany:
        for name, m2m in model._meta.manytomany.items():
            if should_skip(name):
                continue

            # Avoid re-serializing the through-model and reverse accessor.
            exclude.update((m2m, m2m.rel_model._meta.manytomany[m2m.backref]))
            for fkf in m2m.through_model._meta.refs:
                exclude.add(fkf)

            data[name] = [
                model_to_dict(
                    rel_obj,
                    recurse=recurse,
                    backrefs=backrefs,
                    only=only,
                    exclude=exclude,
                    max_depth=max_depth - 1)
                for rel_obj in getattr(model, name)]

    for field in model._meta.sorted_fields:
        if should_skip(field):
            continue

        field_data = model.__data__.get(field.name)
        if isinstance(field, ForeignKeyField) and recurse:
            if field_data is not None:
                seen.add(field)
                rel_obj = getattr(model, field.name)
                field_data = model_to_dict(
                    rel_obj,
                    recurse=recurse,
                    backrefs=backrefs,
                    only=only,
                    exclude=exclude,
                    seen=seen,
                    max_depth=max_depth - 1)
            else:
                field_data = None

        data[field.name] = field_data

    if extra_attrs:
        for attr_name in extra_attrs:
            attr = getattr(model, attr_name)
            data[attr_name] = attr() if callable_(attr) else attr

    if backrefs and recurse:
        for foreign_key, rel_model in model._meta.backrefs.items():
            if foreign_key.backref == '+':
                continue
            descriptor = getattr(model_class, foreign_key.backref)
            if descriptor in exclude or foreign_key in exclude:
                continue
            if only and (descriptor not in only) and (foreign_key not in only):
                continue

            exclude.add(foreign_key)
            data[foreign_key.backref] = [
                model_to_dict(
                    rel_obj,
                    recurse=recurse,
                    backrefs=backrefs,
                    only=only,
                    exclude=exclude,
                    max_depth=max_depth - 1)
                for rel_obj in getattr(model, foreign_key.backref)]

    return data


def update_model_from_dict(instance, data, ignore_unknown=False):
    """Apply *data* (possibly nested) onto *instance*, returning it."""
    meta = instance._meta
    backrefs = dict((fk.backref, fk) for fk in meta.backrefs)

    for key, value in data.items():
        if key in meta.combined:
            field, is_backref = meta.combined[key], False
        elif key in backrefs:
            field, is_backref = backrefs[key], True
        elif ignore_unknown:
            setattr(instance, key, value)
            continue
        else:
            raise AttributeError('Unrecognized attribute "%s" for model '
                                 'class %s.' % (key, type(instance)))

        is_foreign_key = isinstance(field, ForeignKeyField)

        if not is_backref and is_foreign_key and isinstance(value, dict):
            # Recurse into a nested related object.
            try:
                rel_instance = instance.__rel__[field.name]
            except KeyError:
                rel_instance = field.rel_model()
            setattr(
                instance,
                field.name,
                update_model_from_dict(rel_instance, value, ignore_unknown))
        elif is_backref and isinstance(value, (list, tuple)):
            instances = [
                dict_to_model(field.model, row_data, ignore_unknown)
                for row_data in value]
            for rel_instance in instances:
                setattr(rel_instance, field.name, instance)
            setattr(instance, field.backref, instances)
        else:
            setattr(instance, field.name, value)

    return instance


def dict_to_model(model_class, data, ignore_unknown=False):
    """Build a new *model_class* instance from a (possibly nested) dict."""
    return update_model_from_dict(model_class(), data, ignore_unknown)
If your application makes use + of long-lived connections, you may find your connections are closed after + a period of no activity. This mixin will attempt to reconnect automatically + when these errors occur. + + This mixin class probably should not be used with Postgres (unless you + REALLY know what you are doing) and definitely has no business being used + with Sqlite. If you wish to use with Postgres, you will need to adapt the + `reconnect_errors` attribute to something appropriate for Postgres. + """ + reconnect_errors = ( + # Error class, error message fragment (or empty string for all). + (OperationalError, '2006'), # MySQL server has gone away. + (OperationalError, '2013'), # Lost connection to MySQL server. + (OperationalError, '2014'), # Commands out of sync. + (OperationalError, '4031'), # Client interaction timeout. + + # mysql-connector raises a slightly different error when an idle + # connection is terminated by the server. This is equivalent to 2013. + (OperationalError, 'MySQL Connection not available.'), + ) + + def __init__(self, *args, **kwargs): + super(ReconnectMixin, self).__init__(*args, **kwargs) + + # Normalize the reconnect errors to a more efficient data-structure. 
+ self._reconnect_errors = {} + for exc_class, err_fragment in self.reconnect_errors: + self._reconnect_errors.setdefault(exc_class, []) + self._reconnect_errors[exc_class].append(err_fragment.lower()) + + def execute_sql(self, sql, params=None, commit=SENTINEL): + try: + return super(ReconnectMixin, self).execute_sql(sql, params, commit) + except Exception as exc: + exc_class = type(exc) + if exc_class not in self._reconnect_errors: + raise exc + + exc_repr = str(exc).lower() + for err_fragment in self._reconnect_errors[exc_class]: + if err_fragment in exc_repr: + break + else: + raise exc + + if not self.is_closed(): + self.close() + self.connect() + + return super(ReconnectMixin, self).execute_sql(sql, params, commit) + + +def resolve_multimodel_query(query, key='_model_identifier'): + mapping = {} + accum = [query] + while accum: + curr = accum.pop() + if isinstance(curr, CompoundSelectQuery): + accum.extend((curr.lhs, curr.rhs)) + continue + + model_class = curr.model + name = model_class._meta.table_name + mapping[name] = model_class + curr._returning.append(Value(name).alias(key)) + + def wrapped_iterator(): + for row in query.dicts().iterator(): + identifier = row.pop(key) + model = mapping[identifier] + yield model(**row) + + return wrapped_iterator() + + +class ThreadSafeDatabaseMetadata(Metadata): + """ + Metadata class to allow swapping database at run-time in a multi-threaded + application. To use: + + class Base(Model): + class Meta: + model_metadata_class = ThreadSafeDatabaseMetadata + """ + def __init__(self, *args, **kwargs): + # The database attribute is stored in a thread-local. 
+ self._database = None + self._local = threading.local() + super(ThreadSafeDatabaseMetadata, self).__init__(*args, **kwargs) + + def _get_db(self): + return getattr(self._local, 'database', self._database) + def _set_db(self, db): + if self._database is None: + self._database = db + self._local.database = db + database = property(_get_db, _set_db) diff --git a/python3.9libs/playhouse/signals.py b/python3.9libs/playhouse/signals.py new file mode 100644 index 0000000..4e92872 --- /dev/null +++ b/python3.9libs/playhouse/signals.py @@ -0,0 +1,79 @@ +""" +Provide django-style hooks for model events. +""" +from peewee import Model as _Model + + +class Signal(object): + def __init__(self): + self._flush() + + def _flush(self): + self._receivers = set() + self._receiver_list = [] + + def connect(self, receiver, name=None, sender=None): + name = name or receiver.__name__ + key = (name, sender) + if key not in self._receivers: + self._receivers.add(key) + self._receiver_list.append((name, receiver, sender)) + else: + raise ValueError('receiver named %s (for sender=%s) already ' + 'connected' % (name, sender or 'any')) + + def disconnect(self, receiver=None, name=None, sender=None): + if receiver: + name = name or receiver.__name__ + if not name: + raise ValueError('a receiver or a name must be provided') + + key = (name, sender) + if key not in self._receivers: + raise ValueError('receiver named %s for sender=%s not found.' 
% + (name, sender or 'any')) + + self._receivers.remove(key) + self._receiver_list = [(n, r, s) for n, r, s in self._receiver_list + if n != name and s != sender] + + def __call__(self, name=None, sender=None): + def decorator(fn): + self.connect(fn, name, sender) + return fn + return decorator + + def send(self, instance, *args, **kwargs): + sender = type(instance) + responses = [] + for n, r, s in self._receiver_list: + if s is None or isinstance(instance, s): + responses.append((r, r(sender, instance, *args, **kwargs))) + return responses + + +pre_save = Signal() +post_save = Signal() +pre_delete = Signal() +post_delete = Signal() +pre_init = Signal() + + +class Model(_Model): + def __init__(self, *args, **kwargs): + super(Model, self).__init__(*args, **kwargs) + pre_init.send(self) + + def save(self, *args, **kwargs): + pk_value = self._pk if self._meta.primary_key else True + created = kwargs.get('force_insert', False) or not bool(pk_value) + pre_save.send(self, created=created) + ret = super(Model, self).save(*args, **kwargs) + post_save.send(self, created=created) + return ret + + def delete_instance(self, *args, **kwargs): + pre_delete.send(self) + ret = super(Model, self).delete_instance(*args, **kwargs) + post_delete.send(self) + return ret diff --git a/python3.9libs/playhouse/sqlcipher_ext.py b/python3.9libs/playhouse/sqlcipher_ext.py new file mode 100644 index 0000000..66558d0 --- /dev/null +++ b/python3.9libs/playhouse/sqlcipher_ext.py @@ -0,0 +1,106 @@ +""" +Peewee integration with pysqlcipher. + +Project page: https://github.com/leapcode/pysqlcipher/ + +**WARNING!!! EXPERIMENTAL!!!** + +* Although this extention's code is short, it has not been properly + peer-reviewed yet and may have introduced vulnerabilities. + +Also note that this code relies on pysqlcipher and sqlcipher, and +the code there might have vulnerabilities as well, but since these +are widely used crypto modules, we can expect "short zero days" there. 
+ +Example usage: + + from peewee.playground.ciphersql_ext import SqlCipherDatabase + db = SqlCipherDatabase('/path/to/my.db', passphrase="don'tuseme4real") + +* `passphrase`: should be "long enough". + Note that *length beats vocabulary* (much exponential), and even + a lowercase-only passphrase like easytorememberyethardforotherstoguess + packs more noise than 8 random printable characters and *can* be memorized. + +When opening an existing database, passphrase should be the one used when the +database was created. If the passphrase is incorrect, an exception will only be +raised **when you access the database**. + +If you need to ask for an interactive passphrase, here's example code you can +put after the `db = ...` line: + + try: # Just access the database so that it checks the encryption. + db.get_tables() + # We're looking for a DatabaseError with a specific error message. + except peewee.DatabaseError as e: + # Check whether the message *means* "passphrase is wrong" + if e.args[0] == 'file is encrypted or is not a database': + raise Exception('Developer should Prompt user for passphrase ' + 'again.') + else: + # A different DatabaseError. Raise it. 
+ raise e + +See a more elaborate example with this code at +https://gist.github.com/thedod/11048875 +""" +import datetime +import decimal +import sys + +from peewee import * +from playhouse.sqlite_ext import SqliteExtDatabase +if sys.version_info[0] != 3: + from pysqlcipher import dbapi2 as sqlcipher +else: + try: + from sqlcipher3 import dbapi2 as sqlcipher + except ImportError: + from pysqlcipher3 import dbapi2 as sqlcipher + +sqlcipher.register_adapter(decimal.Decimal, str) +sqlcipher.register_adapter(datetime.date, str) +sqlcipher.register_adapter(datetime.time, str) +__sqlcipher_version__ = sqlcipher.sqlite_version_info + + +class _SqlCipherDatabase(object): + server_version = __sqlcipher_version__ + + def _connect(self): + params = dict(self.connect_params) + passphrase = params.pop('passphrase', '').replace("'", "''") + + conn = sqlcipher.connect(self.database, isolation_level=None, **params) + try: + if passphrase: + conn.execute("PRAGMA key='%s'" % passphrase) + self._add_conn_hooks(conn) + except: + conn.close() + raise + return conn + + def set_passphrase(self, passphrase): + if not self.is_closed(): + raise ImproperlyConfigured('Cannot set passphrase when database ' + 'is open. 
To change passphrase of an ' + 'open database use the rekey() method.') + + self.connect_params['passphrase'] = passphrase + + def rekey(self, passphrase): + if self.is_closed(): + self.connect() + + self.execute_sql("PRAGMA rekey='%s'" % passphrase.replace("'", "''")) + self.connect_params['passphrase'] = passphrase + return True + + +class SqlCipherDatabase(_SqlCipherDatabase, SqliteDatabase): + pass + + +class SqlCipherExtDatabase(_SqlCipherDatabase, SqliteExtDatabase): + pass diff --git a/python3.9libs/playhouse/sqlite_changelog.py b/python3.9libs/playhouse/sqlite_changelog.py new file mode 100644 index 0000000..b036af2 --- /dev/null +++ b/python3.9libs/playhouse/sqlite_changelog.py @@ -0,0 +1,123 @@ +from peewee import * +from playhouse.sqlite_ext import JSONField + + +class BaseChangeLog(Model): + timestamp = DateTimeField(constraints=[SQL('DEFAULT CURRENT_TIMESTAMP')]) + action = TextField() + table = TextField() + primary_key = IntegerField() + changes = JSONField() + + +class ChangeLog(object): + # Model class that will serve as the base for the changelog. This model + # will be subclassed and mapped to your application database. + base_model = BaseChangeLog + + # Template for the triggers that handle updating the changelog table. 
+ # table: table name + # action: insert / update / delete + # new_old: NEW or OLD (OLD is for DELETE) + # primary_key: table primary key column name + # column_array: output of build_column_array() + # change_table: changelog table name + template = """CREATE TRIGGER IF NOT EXISTS %(table)s_changes_%(action)s + AFTER %(action)s ON %(table)s + BEGIN + INSERT INTO %(change_table)s + ("action", "table", "primary_key", "changes") + SELECT + '%(action)s', '%(table)s', %(new_old)s."%(primary_key)s", "changes" + FROM ( + SELECT json_group_object( + col, + json_array("oldval", "newval")) AS "changes" + FROM ( + SELECT json_extract(value, '$[0]') as "col", + json_extract(value, '$[1]') as "oldval", + json_extract(value, '$[2]') as "newval" + FROM json_each(json_array(%(column_array)s)) + WHERE "oldval" IS NOT "newval" + ) + ); + END;""" + + drop_template = 'DROP TRIGGER IF EXISTS %(table)s_changes_%(action)s' + + _actions = ('INSERT', 'UPDATE', 'DELETE') + + def __init__(self, db, table_name='changelog'): + self.db = db + self.table_name = table_name + + def _build_column_array(self, model, use_old, use_new, skip_fields=None): + # Builds a list of SQL expressions for each field we are tracking. This + # is used as the data source for change tracking in our trigger. + col_array = [] + for field in model._meta.sorted_fields: + if field.primary_key: + continue + + if skip_fields is not None and field.name in skip_fields: + continue + + column = field.column_name + new = 'NULL' if not use_new else 'NEW."%s"' % column + old = 'NULL' if not use_old else 'OLD."%s"' % column + + if isinstance(field, JSONField): + # Ensure that values are cast to JSON so that the serialization + # is preserved when calculating the old / new. 
+ if use_old: old = 'json(%s)' % old + if use_new: new = 'json(%s)' % new + + col_array.append("json_array('%s', %s, %s)" % (column, old, new)) + + return ', '.join(col_array) + + def trigger_sql(self, model, action, skip_fields=None): + assert action in self._actions + use_old = action != 'INSERT' + use_new = action != 'DELETE' + cols = self._build_column_array(model, use_old, use_new, skip_fields) + return self.template % { + 'table': model._meta.table_name, + 'action': action, + 'new_old': 'NEW' if action != 'DELETE' else 'OLD', + 'primary_key': model._meta.primary_key.column_name, + 'column_array': cols, + 'change_table': self.table_name} + + def drop_trigger_sql(self, model, action): + assert action in self._actions + return self.drop_template % { + 'table': model._meta.table_name, + 'action': action} + + @property + def model(self): + if not hasattr(self, '_changelog_model'): + class ChangeLog(self.base_model): + class Meta: + database = self.db + table_name = self.table_name + self._changelog_model = ChangeLog + + return self._changelog_model + + def install(self, model, skip_fields=None, drop=True, insert=True, + update=True, delete=True, create_table=True): + ChangeLog = self.model + if create_table: + ChangeLog.create_table() + + actions = list(zip((insert, update, delete), self._actions)) + if drop: + for _, action in actions: + self.db.execute_sql(self.drop_trigger_sql(model, action)) + + for enabled, action in actions: + if enabled: + sql = self.trigger_sql(model, action, skip_fields) + self.db.execute_sql(sql) diff --git a/python3.9libs/playhouse/sqlite_ext.py b/python3.9libs/playhouse/sqlite_ext.py new file mode 100644 index 0000000..e20ead6 --- /dev/null +++ b/python3.9libs/playhouse/sqlite_ext.py @@ -0,0 +1,1311 @@ +import json +import math +import re +import struct +import sys + +from peewee import * +from peewee import ColumnBase +from peewee import EnclosedNodeList +from peewee import Entity +from peewee import Expression +from peewee import 
Insert +from peewee import Node +from peewee import NodeList +from peewee import OP +from peewee import VirtualField +from peewee import merge_dict +from peewee import sqlite3 +try: + from playhouse._sqlite_ext import ( + backup, + backup_to_file, + Blob, + ConnectionHelper, + register_bloomfilter, + register_hash_functions, + register_rank_functions, + sqlite_get_db_status, + sqlite_get_status, + TableFunction, + ZeroBlob, + ) + CYTHON_SQLITE_EXTENSIONS = True +except ImportError: + CYTHON_SQLITE_EXTENSIONS = False + + +if sys.version_info[0] == 3: + basestring = str + + +FTS3_MATCHINFO = 'pcx' +FTS4_MATCHINFO = 'pcnalx' +if sqlite3 is not None: + FTS_VERSION = 4 if sqlite3.sqlite_version_info[:3] >= (3, 7, 4) else 3 +else: + FTS_VERSION = 3 + +FTS5_MIN_SQLITE_VERSION = (3, 9, 0) + + +class RowIDField(AutoField): + auto_increment = True + column_name = name = required_name = 'rowid' + + def bind(self, model, name, *args): + if name != self.required_name: + raise ValueError('%s must be named "%s".' 
% + (type(self), self.required_name)) + super(RowIDField, self).bind(model, name, *args) + + +class DocIDField(RowIDField): + column_name = name = required_name = 'docid' + + +class AutoIncrementField(AutoField): + def ddl(self, ctx): + node_list = super(AutoIncrementField, self).ddl(ctx) + return NodeList((node_list, SQL('AUTOINCREMENT'))) + + +class TDecimalField(DecimalField): + field_type = 'TEXT' + def get_modifiers(self): pass + + +class JSONPath(ColumnBase): + def __init__(self, field, path=None): + super(JSONPath, self).__init__() + self._field = field + self._path = path or () + + @property + def path(self): + return Value('$%s' % ''.join(self._path)) + + def __getitem__(self, idx): + if isinstance(idx, int): + item = '[%s]' % idx + else: + item = '.%s' % idx + return JSONPath(self._field, self._path + (item,)) + + def set(self, value, as_json=None): + if as_json or isinstance(value, (list, dict)): + value = fn.json(self._field._json_dumps(value)) + return fn.json_set(self._field, self.path, value) + + def update(self, value): + return self.set(fn.json_patch(self, self._field._json_dumps(value))) + + def remove(self): + return fn.json_remove(self._field, self.path) + + def json_type(self): + return fn.json_type(self._field, self.path) + + def length(self): + return fn.json_array_length(self._field, self.path) + + def children(self): + return fn.json_each(self._field, self.path) + + def tree(self): + return fn.json_tree(self._field, self.path) + + def __sql__(self, ctx): + return ctx.sql(fn.json_extract(self._field, self.path) + if self._path else self._field) + + +class JSONField(TextField): + field_type = 'JSON' + unpack = False + + def __init__(self, json_dumps=None, json_loads=None, **kwargs): + self._json_dumps = json_dumps or json.dumps + self._json_loads = json_loads or json.loads + super(JSONField, self).__init__(**kwargs) + + def python_value(self, value): + if value is not None: + try: + return self._json_loads(value) + except (TypeError, 
ValueError): + return value + + def db_value(self, value): + if value is not None: + if not isinstance(value, Node): + value = fn.json(self._json_dumps(value)) + return value + + def _e(op): + def inner(self, rhs): + if isinstance(rhs, (list, dict)): + rhs = Value(rhs, converter=self.db_value, unpack=False) + return Expression(self, op, rhs) + return inner + __eq__ = _e(OP.EQ) + __ne__ = _e(OP.NE) + __gt__ = _e(OP.GT) + __ge__ = _e(OP.GTE) + __lt__ = _e(OP.LT) + __le__ = _e(OP.LTE) + __hash__ = Field.__hash__ + + def __getitem__(self, item): + return JSONPath(self)[item] + + def set(self, value, as_json=None): + return JSONPath(self).set(value, as_json) + + def update(self, data): + return JSONPath(self).update(data) + + def remove(self): + return JSONPath(self).remove() + + def json_type(self): + return fn.json_type(self) + + def length(self): + return fn.json_array_length(self) + + def children(self): + """ + Schema of `json_each` and `json_tree`: + + key, + value, + type TEXT (object, array, string, etc), + atom (value for primitive/scalar types, NULL for array and object) + id INTEGER (unique identifier for element) + parent INTEGER (unique identifier of parent element or NULL) + fullkey TEXT (full path describing element) + path TEXT (path to the container of the current element) + json JSON hidden (1st input parameter to function) + root TEXT hidden (2nd input parameter, path at which to start) + """ + return fn.json_each(self) + + def tree(self): + return fn.json_tree(self) + + +class SearchField(Field): + def __init__(self, unindexed=False, column_name=None, **k): + if k: + raise ValueError('SearchField does not accept these keyword ' + 'arguments: %s.' 
% sorted(k)) + super(SearchField, self).__init__(unindexed=unindexed, + column_name=column_name, null=True) + + def match(self, term): + return match(self, term) + + @property + def fts_column_index(self): + if not hasattr(self, '_fts_column_index'): + search_fields = [f.name for f in self.model._meta.sorted_fields + if isinstance(f, SearchField)] + self._fts_column_index = search_fields.index(self.name) + return self._fts_column_index + + def highlight(self, left, right): + column_idx = self.fts_column_index + return fn.highlight(self.model._meta.entity, column_idx, left, right) + + def snippet(self, left, right, over_length='...', max_tokens=16): + if not (0 < max_tokens < 65): + raise ValueError('max_tokens must be between 1 and 64 (inclusive)') + column_idx = self.fts_column_index + return fn.snippet(self.model._meta.entity, column_idx, left, right, + over_length, max_tokens) + + +class VirtualTableSchemaManager(SchemaManager): + def _create_virtual_table(self, safe=True, **options): + options = self.model.clean_options( + merge_dict(self.model._meta.options, options)) + + # Structure: + # CREATE VIRTUAL TABLE + # USING + # ([prefix_arguments, ...] fields, ... [arguments, ...], [options...]) + ctx = self._create_context() + ctx.literal('CREATE VIRTUAL TABLE ') + if safe: + ctx.literal('IF NOT EXISTS ') + (ctx + .sql(self.model) + .literal(' USING ')) + + ext_module = self.model._meta.extension_module + if isinstance(ext_module, Node): + return ctx.sql(ext_module) + + ctx.sql(SQL(ext_module)).literal(' ') + arguments = [] + meta = self.model._meta + + if meta.prefix_arguments: + arguments.extend([SQL(a) for a in meta.prefix_arguments]) + + # Constraints, data-types, foreign and primary keys are all omitted. 
+ for field in meta.sorted_fields: + if isinstance(field, (RowIDField)) or field._hidden: + continue + field_def = [Entity(field.column_name)] + if field.unindexed: + field_def.append(SQL('UNINDEXED')) + arguments.append(NodeList(field_def)) + + if meta.arguments: + arguments.extend([SQL(a) for a in meta.arguments]) + + if options: + arguments.extend(self._create_table_option_sql(options)) + return ctx.sql(EnclosedNodeList(arguments)) + + def _create_table(self, safe=True, **options): + if issubclass(self.model, VirtualModel): + return self._create_virtual_table(safe, **options) + + return super(VirtualTableSchemaManager, self)._create_table( + safe, **options) + + +class VirtualModel(Model): + class Meta: + arguments = None + extension_module = None + prefix_arguments = None + primary_key = False + schema_manager_class = VirtualTableSchemaManager + + @classmethod + def clean_options(cls, options): + return options + + +class BaseFTSModel(VirtualModel): + @classmethod + def clean_options(cls, options): + content = options.get('content') + prefix = options.get('prefix') + tokenize = options.get('tokenize') + + if isinstance(content, basestring) and content == '': + # Special-case content-less full-text search tables. + options['content'] = "''" + elif isinstance(content, Field): + # Special-case to ensure fields are fully-qualified. + options['content'] = Entity(content.model._meta.table_name, + content.column_name) + + if prefix: + if isinstance(prefix, (list, tuple)): + prefix = ','.join([str(i) for i in prefix]) + options['prefix'] = "'%s'" % prefix.strip("' ") + + if tokenize and cls._meta.extension_module.lower() == 'fts5': + # Tokenizers need to be in quoted string for FTS5, but not for FTS3 + # or FTS4. + options['tokenize'] = '"%s"' % tokenize + + return options + + +class FTSModel(BaseFTSModel): + """ + VirtualModel class for creating tables that use either the FTS3 or FTS4 + search extensions. 
Peewee automatically determines which version of the + FTS extension is supported and will use FTS4 if possible. + """ + # FTS3/4 uses "docid" in the same way a normal table uses "rowid". + docid = DocIDField() + + class Meta: + extension_module = 'FTS%s' % FTS_VERSION + + @classmethod + def _fts_cmd(cls, cmd): + tbl = cls._meta.table_name + res = cls._meta.database.execute_sql( + "INSERT INTO %s(%s) VALUES('%s');" % (tbl, tbl, cmd)) + return res.fetchone() + + @classmethod + def optimize(cls): + return cls._fts_cmd('optimize') + + @classmethod + def rebuild(cls): + return cls._fts_cmd('rebuild') + + @classmethod + def integrity_check(cls): + return cls._fts_cmd('integrity-check') + + @classmethod + def merge(cls, blocks=200, segments=8): + return cls._fts_cmd('merge=%s,%s' % (blocks, segments)) + + @classmethod + def automerge(cls, state=True): + return cls._fts_cmd('automerge=%s' % (state and '1' or '0')) + + @classmethod + def match(cls, term): + """ + Generate a `MATCH` expression appropriate for searching this table. 
+ """ + return match(cls._meta.entity, term) + + @classmethod + def rank(cls, *weights): + matchinfo = fn.matchinfo(cls._meta.entity, FTS3_MATCHINFO) + return fn.fts_rank(matchinfo, *weights) + + @classmethod + def bm25(cls, *weights): + match_info = fn.matchinfo(cls._meta.entity, FTS4_MATCHINFO) + return fn.fts_bm25(match_info, *weights) + + @classmethod + def bm25f(cls, *weights): + match_info = fn.matchinfo(cls._meta.entity, FTS4_MATCHINFO) + return fn.fts_bm25f(match_info, *weights) + + @classmethod + def lucene(cls, *weights): + match_info = fn.matchinfo(cls._meta.entity, FTS4_MATCHINFO) + return fn.fts_lucene(match_info, *weights) + + @classmethod + def _search(cls, term, weights, with_score, score_alias, score_fn, + explicit_ordering): + if not weights: + rank = score_fn() + elif isinstance(weights, dict): + weight_args = [] + for field in cls._meta.sorted_fields: + # Attempt to get the specified weight of the field by looking + # it up using it's field instance followed by name. 
+ field_weight = weights.get(field, weights.get(field.name, 1.0)) + weight_args.append(field_weight) + rank = score_fn(*weight_args) + else: + rank = score_fn(*weights) + + selection = () + order_by = rank + if with_score: + selection = (cls, rank.alias(score_alias)) + if with_score and not explicit_ordering: + order_by = SQL(score_alias) + + return (cls + .select(*selection) + .where(cls.match(term)) + .order_by(order_by)) + + @classmethod + def search(cls, term, weights=None, with_score=False, score_alias='score', + explicit_ordering=False): + """Full-text search using selected `term`.""" + return cls._search( + term, + weights, + with_score, + score_alias, + cls.rank, + explicit_ordering) + + @classmethod + def search_bm25(cls, term, weights=None, with_score=False, + score_alias='score', explicit_ordering=False): + """Full-text search for selected `term` using BM25 algorithm.""" + return cls._search( + term, + weights, + with_score, + score_alias, + cls.bm25, + explicit_ordering) + + @classmethod + def search_bm25f(cls, term, weights=None, with_score=False, + score_alias='score', explicit_ordering=False): + """Full-text search for selected `term` using BM25 algorithm.""" + return cls._search( + term, + weights, + with_score, + score_alias, + cls.bm25f, + explicit_ordering) + + @classmethod + def search_lucene(cls, term, weights=None, with_score=False, + score_alias='score', explicit_ordering=False): + """Full-text search for selected `term` using BM25 algorithm.""" + return cls._search( + term, + weights, + with_score, + score_alias, + cls.lucene, + explicit_ordering) + + +_alphabet = 'abcdefghijklmnopqrstuvwxyz' +_alphanum = (set('\t ,"(){}*:_+0123456789') | + set(_alphabet) | + set(_alphabet.upper()) | + set((chr(26),))) +_invalid_ascii = set(chr(p) for p in range(128) if chr(p) not in _alphanum) +_quote_re = re.compile(r'(?:[^\s"]|"(?:\\.|[^"])*")+') + + +class FTS5Model(BaseFTSModel): + """ + Requires SQLite >= 3.9.0. 
+ + Table options: + + content: table name of external content, or empty string for "contentless" + content_rowid: column name of external content primary key + prefix: integer(s). Ex: '2' or '2 3 4' + tokenize: porter, unicode61, ascii. Ex: 'porter unicode61' + + The unicode tokenizer supports the following parameters: + + * remove_diacritics (1 or 0, default is 1) + * tokenchars (string of characters, e.g. '-_' + * separators (string of characters) + + Parameters are passed as alternating parameter name and value, so: + + {'tokenize': "unicode61 remove_diacritics 0 tokenchars '-_'"} + + Content-less tables: + + If you don't need the full-text content in it's original form, you can + specify a content-less table. Searches and auxiliary functions will work + as usual, but the only values returned when SELECT-ing can be rowid. Also + content-less tables do not support UPDATE or DELETE. + + External content tables: + + You can set up triggers to sync these, e.g. + + -- Create a table. And an external content fts5 table to index it. + CREATE TABLE tbl(a INTEGER PRIMARY KEY, b); + CREATE VIRTUAL TABLE ft USING fts5(b, content='tbl', content_rowid='a'); + + -- Triggers to keep the FTS index up to date. + CREATE TRIGGER tbl_ai AFTER INSERT ON tbl BEGIN + INSERT INTO ft(rowid, b) VALUES (new.a, new.b); + END; + CREATE TRIGGER tbl_ad AFTER DELETE ON tbl BEGIN + INSERT INTO ft(fts_idx, rowid, b) VALUES('delete', old.a, old.b); + END; + CREATE TRIGGER tbl_au AFTER UPDATE ON tbl BEGIN + INSERT INTO ft(fts_idx, rowid, b) VALUES('delete', old.a, old.b); + INSERT INTO ft(rowid, b) VALUES (new.a, new.b); + END; + + Built-in auxiliary functions: + + * bm25(tbl[, weight_0, ... weight_n]) + * highlight(tbl, col_idx, prefix, suffix) + * snippet(tbl, col_idx, prefix, suffix, ?, max_tokens) + """ + # FTS5 does not support declared primary keys, but we can use the + # implicit rowid. 
+ rowid = RowIDField() + + class Meta: + extension_module = 'fts5' + + _error_messages = { + 'field_type': ('Besides the implicit `rowid` column, all columns must ' + 'be instances of SearchField'), + 'index': 'Secondary indexes are not supported for FTS5 models', + 'pk': 'FTS5 models must use the default `rowid` primary key', + } + + @classmethod + def validate_model(cls): + # Perform FTS5-specific validation and options post-processing. + if cls._meta.primary_key.name != 'rowid': + raise ImproperlyConfigured(cls._error_messages['pk']) + for field in cls._meta.fields.values(): + if not isinstance(field, (SearchField, RowIDField)): + raise ImproperlyConfigured(cls._error_messages['field_type']) + if cls._meta.indexes: + raise ImproperlyConfigured(cls._error_messages['index']) + + @classmethod + def fts5_installed(cls): + if sqlite3.sqlite_version_info[:3] < FTS5_MIN_SQLITE_VERSION: + return False + + # Test in-memory DB to determine if the FTS5 extension is installed. + tmp_db = sqlite3.connect(':memory:') + try: + tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);') + except: + try: + tmp_db.enable_load_extension(True) + tmp_db.load_extension('fts5') + except: + return False + else: + cls._meta.database.load_extension('fts5') + finally: + tmp_db.close() + + return True + + @staticmethod + def validate_query(query): + """ + Simple helper function to indicate whether a search query is a + valid FTS5 query. Note: this simply looks at the characters being + used, and is not guaranteed to catch all problematic queries. + """ + tokens = _quote_re.findall(query) + for token in tokens: + if token.startswith('"') and token.endswith('"'): + continue + if set(token) & _invalid_ascii: + return False + return True + + @staticmethod + def clean_query(query, replace=chr(26)): + """ + Clean a query of invalid tokens. 
+ """ + accum = [] + any_invalid = False + tokens = _quote_re.findall(query) + for token in tokens: + if token.startswith('"') and token.endswith('"'): + accum.append(token) + continue + token_set = set(token) + invalid_for_token = token_set & _invalid_ascii + if invalid_for_token: + any_invalid = True + for c in invalid_for_token: + token = token.replace(c, replace) + accum.append(token) + + if any_invalid: + return ' '.join(accum) + return query + + @classmethod + def match(cls, term): + """ + Generate a `MATCH` expression appropriate for searching this table. + """ + return match(cls._meta.entity, term) + + @classmethod + def rank(cls, *args): + return cls.bm25(*args) if args else SQL('rank') + + @classmethod + def bm25(cls, *weights): + return fn.bm25(cls._meta.entity, *weights) + + @classmethod + def search(cls, term, weights=None, with_score=False, score_alias='score', + explicit_ordering=False): + """Full-text search using selected `term`.""" + return cls.search_bm25( + FTS5Model.clean_query(term), + weights, + with_score, + score_alias, + explicit_ordering) + + @classmethod + def search_bm25(cls, term, weights=None, with_score=False, + score_alias='score', explicit_ordering=False): + """Full-text search using selected `term`.""" + if not weights: + rank = SQL('rank') + elif isinstance(weights, dict): + weight_args = [] + for field in cls._meta.sorted_fields: + if isinstance(field, SearchField) and not field.unindexed: + weight_args.append( + weights.get(field, weights.get(field.name, 1.0))) + rank = fn.bm25(cls._meta.entity, *weight_args) + else: + rank = fn.bm25(cls._meta.entity, *weights) + + selection = () + order_by = rank + if with_score: + selection = (cls, rank.alias(score_alias)) + if with_score and not explicit_ordering: + order_by = SQL(score_alias) + + return (cls + .select(*selection) + .where(cls.match(FTS5Model.clean_query(term))) + .order_by(order_by)) + + @classmethod + def _fts_cmd_sql(cls, cmd, **extra_params): + tbl = cls._meta.entity + 
columns = [tbl] + values = [cmd] + for key, value in extra_params.items(): + columns.append(Entity(key)) + values.append(value) + + return NodeList(( + SQL('INSERT INTO'), + cls._meta.entity, + EnclosedNodeList(columns), + SQL('VALUES'), + EnclosedNodeList(values))) + + @classmethod + def _fts_cmd(cls, cmd, **extra_params): + query = cls._fts_cmd_sql(cmd, **extra_params) + return cls._meta.database.execute(query) + + @classmethod + def automerge(cls, level): + if not (0 <= level <= 16): + raise ValueError('level must be between 0 and 16') + return cls._fts_cmd('automerge', rank=level) + + @classmethod + def merge(cls, npages): + return cls._fts_cmd('merge', rank=npages) + + @classmethod + def set_pgsz(cls, pgsz): + return cls._fts_cmd('pgsz', rank=pgsz) + + @classmethod + def set_rank(cls, rank_expression): + return cls._fts_cmd('rank', rank=rank_expression) + + @classmethod + def delete_all(cls): + return cls._fts_cmd('delete-all') + + @classmethod + def VocabModel(cls, table_type='row', table=None): + if table_type not in ('row', 'col', 'instance'): + raise ValueError('table_type must be either "row", "col" or ' + '"instance".') + + attr = '_vocab_model_%s' % table_type + + if not hasattr(cls, attr): + class Meta: + database = cls._meta.database + table_name = table or cls._meta.table_name + '_v' + extension_module = fn.fts5vocab( + cls._meta.entity, + SQL(table_type)) + + attrs = { + 'term': VirtualField(TextField), + 'doc': IntegerField(), + 'cnt': IntegerField(), + 'rowid': RowIDField(), + 'Meta': Meta, + } + if table_type == 'col': + attrs['col'] = VirtualField(TextField) + elif table_type == 'instance': + attrs['offset'] = VirtualField(IntegerField) + + class_name = '%sVocab' % cls.__name__ + setattr(cls, attr, type(class_name, (VirtualModel,), attrs)) + + return getattr(cls, attr) + + +def ClosureTable(model_class, foreign_key=None, referencing_class=None, + referencing_key=None): + """Model factory for the transitive closure extension.""" + if 
referencing_class is None: + referencing_class = model_class + + if foreign_key is None: + for field_obj in model_class._meta.refs: + if field_obj.rel_model is model_class: + foreign_key = field_obj + break + else: + raise ValueError('Unable to find self-referential foreign key.') + + source_key = model_class._meta.primary_key + if referencing_key is None: + referencing_key = source_key + + class BaseClosureTable(VirtualModel): + depth = VirtualField(IntegerField) + id = VirtualField(IntegerField) + idcolumn = VirtualField(TextField) + parentcolumn = VirtualField(TextField) + root = VirtualField(IntegerField) + tablename = VirtualField(TextField) + + class Meta: + extension_module = 'transitive_closure' + + @classmethod + def descendants(cls, node, depth=None, include_node=False): + query = (model_class + .select(model_class, cls.depth.alias('depth')) + .join(cls, on=(source_key == cls.id)) + .where(cls.root == node) + .objects()) + if depth is not None: + query = query.where(cls.depth == depth) + elif not include_node: + query = query.where(cls.depth > 0) + return query + + @classmethod + def ancestors(cls, node, depth=None, include_node=False): + query = (model_class + .select(model_class, cls.depth.alias('depth')) + .join(cls, on=(source_key == cls.root)) + .where(cls.id == node) + .objects()) + if depth: + query = query.where(cls.depth == depth) + elif not include_node: + query = query.where(cls.depth > 0) + return query + + @classmethod + def siblings(cls, node, include_node=False): + if referencing_class is model_class: + # self-join + fk_value = node.__data__.get(foreign_key.name) + query = model_class.select().where(foreign_key == fk_value) + else: + # siblings as given in reference_class + siblings = (referencing_class + .select(referencing_key) + .join(cls, on=(foreign_key == cls.root)) + .where((cls.id == node) & (cls.depth == 1))) + + # the according models + query = (model_class + .select() + .where(source_key << siblings) + .objects()) + + if not 
include_node: + query = query.where(source_key != node) + + return query + + class Meta: + database = referencing_class._meta.database + options = { + 'tablename': referencing_class._meta.table_name, + 'idcolumn': referencing_key.column_name, + 'parentcolumn': foreign_key.column_name} + primary_key = False + + name = '%sClosure' % model_class.__name__ + return type(name, (BaseClosureTable,), {'Meta': Meta}) + + +class LSMTable(VirtualModel): + class Meta: + extension_module = 'lsm1' + filename = None + + @classmethod + def clean_options(cls, options): + filename = cls._meta.filename + if not filename: + raise ValueError('LSM1 extension requires that you specify a ' + 'filename for the LSM database.') + else: + if len(filename) >= 2 and filename[0] != '"': + filename = '"%s"' % filename + if not cls._meta.primary_key: + raise ValueError('LSM1 models must specify a primary-key field.') + + key = cls._meta.primary_key + if isinstance(key, AutoField): + raise ValueError('LSM1 models must explicitly declare a primary ' + 'key field.') + if not isinstance(key, (TextField, BlobField, IntegerField)): + raise ValueError('LSM1 key must be a TextField, BlobField, or ' + 'IntegerField.') + key._hidden = True + if isinstance(key, IntegerField): + data_type = 'UINT' + elif isinstance(key, BlobField): + data_type = 'BLOB' + else: + data_type = 'TEXT' + cls._meta.prefix_arguments = [filename, '"%s"' % key.name, data_type] + + # Does the key map to a scalar value, or a tuple of values? 
+ if len(cls._meta.sorted_fields) == 2: + cls._meta._value_field = cls._meta.sorted_fields[1] + else: + cls._meta._value_field = None + + return options + + @classmethod + def load_extension(cls, path='lsm.so'): + cls._meta.database.load_extension(path) + + @staticmethod + def slice_to_expr(key, idx): + if idx.start is not None and idx.stop is not None: + return key.between(idx.start, idx.stop) + elif idx.start is not None: + return key >= idx.start + elif idx.stop is not None: + return key <= idx.stop + + @staticmethod + def _apply_lookup_to_query(query, key, lookup): + if isinstance(lookup, slice): + expr = LSMTable.slice_to_expr(key, lookup) + if expr is not None: + query = query.where(expr) + return query, False + elif isinstance(lookup, Expression): + return query.where(lookup), False + else: + return query.where(key == lookup), True + + @classmethod + def get_by_id(cls, pk): + query, is_single = cls._apply_lookup_to_query( + cls.select().namedtuples(), + cls._meta.primary_key, + pk) + + if is_single: + row = query.get() + return row[1] if cls._meta._value_field is not None else row + else: + return query + + @classmethod + def set_by_id(cls, key, value): + if cls._meta._value_field is not None: + data = {cls._meta._value_field: value} + elif isinstance(value, tuple): + data = {} + for field, fval in zip(cls._meta.sorted_fields[1:], value): + data[field] = fval + elif isinstance(value, dict): + data = value + elif isinstance(value, cls): + data = value.__dict__ + data[cls._meta.primary_key] = key + cls.replace(data).execute() + + @classmethod + def delete_by_id(cls, pk): + query, is_single = cls._apply_lookup_to_query( + cls.delete(), + cls._meta.primary_key, + pk) + return query.execute() + + +OP.MATCH = 'MATCH' + +def _sqlite_regexp(regex, value): + return re.search(regex, value) is not None + + +class SqliteExtDatabase(SqliteDatabase): + def __init__(self, database, c_extensions=None, rank_functions=True, + hash_functions=False, regexp_function=False, + 
bloomfilter=False, json_contains=False, *args, **kwargs): + super(SqliteExtDatabase, self).__init__(database, *args, **kwargs) + self._row_factory = None + + if c_extensions and not CYTHON_SQLITE_EXTENSIONS: + raise ImproperlyConfigured('SqliteExtDatabase initialized with ' + 'C extensions, but shared library was ' + 'not found!') + prefer_c = CYTHON_SQLITE_EXTENSIONS and (c_extensions is not False) + if rank_functions: + if prefer_c: + register_rank_functions(self) + else: + self.register_function(bm25, 'fts_bm25') + self.register_function(rank, 'fts_rank') + self.register_function(bm25, 'fts_bm25f') # Fall back to bm25. + self.register_function(bm25, 'fts_lucene') + if hash_functions: + if not prefer_c: + raise ValueError('C extension required to register hash ' + 'functions.') + register_hash_functions(self) + if regexp_function: + self.register_function(_sqlite_regexp, 'regexp', 2) + if bloomfilter: + if not prefer_c: + raise ValueError('C extension required to use bloomfilter.') + register_bloomfilter(self) + if json_contains: + self.register_function(_json_contains, 'json_contains') + + self._c_extensions = prefer_c + + def _add_conn_hooks(self, conn): + super(SqliteExtDatabase, self)._add_conn_hooks(conn) + if self._row_factory: + conn.row_factory = self._row_factory + + def row_factory(self, fn): + self._row_factory = fn + + +if CYTHON_SQLITE_EXTENSIONS: + SQLITE_STATUS_MEMORY_USED = 0 + SQLITE_STATUS_PAGECACHE_USED = 1 + SQLITE_STATUS_PAGECACHE_OVERFLOW = 2 + SQLITE_STATUS_SCRATCH_USED = 3 + SQLITE_STATUS_SCRATCH_OVERFLOW = 4 + SQLITE_STATUS_MALLOC_SIZE = 5 + SQLITE_STATUS_PARSER_STACK = 6 + SQLITE_STATUS_PAGECACHE_SIZE = 7 + SQLITE_STATUS_SCRATCH_SIZE = 8 + SQLITE_STATUS_MALLOC_COUNT = 9 + SQLITE_DBSTATUS_LOOKASIDE_USED = 0 + SQLITE_DBSTATUS_CACHE_USED = 1 + SQLITE_DBSTATUS_SCHEMA_USED = 2 + SQLITE_DBSTATUS_STMT_USED = 3 + SQLITE_DBSTATUS_LOOKASIDE_HIT = 4 + SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE = 5 + SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL = 6 + 
SQLITE_DBSTATUS_CACHE_HIT = 7 + SQLITE_DBSTATUS_CACHE_MISS = 8 + SQLITE_DBSTATUS_CACHE_WRITE = 9 + SQLITE_DBSTATUS_DEFERRED_FKS = 10 + #SQLITE_DBSTATUS_CACHE_USED_SHARED = 11 + + def __status__(flag, return_highwater=False): + """ + Expose a sqlite3_status() call for a particular flag as a property of + the Database object. + """ + def getter(self): + result = sqlite_get_status(flag) + return result[1] if return_highwater else result + return property(getter) + + def __dbstatus__(flag, return_highwater=False, return_current=False): + """ + Expose a sqlite3_dbstatus() call for a particular flag as a property of + the Database instance. Unlike sqlite3_status(), the dbstatus properties + pertain to the current connection. + """ + def getter(self): + if self._state.conn is None: + raise ImproperlyConfigured('database connection not opened.') + result = sqlite_get_db_status(self._state.conn, flag) + if return_current: + return result[0] + return result[1] if return_highwater else result + return property(getter) + + class CSqliteExtDatabase(SqliteExtDatabase): + def __init__(self, *args, **kwargs): + self._conn_helper = None + self._commit_hook = self._rollback_hook = self._update_hook = None + self._replace_busy_handler = False + super(CSqliteExtDatabase, self).__init__(*args, **kwargs) + + def init(self, database, replace_busy_handler=False, **kwargs): + super(CSqliteExtDatabase, self).init(database, **kwargs) + self._replace_busy_handler = replace_busy_handler + + def _close(self, conn): + if self._commit_hook: + self._conn_helper.set_commit_hook(None) + if self._rollback_hook: + self._conn_helper.set_rollback_hook(None) + if self._update_hook: + self._conn_helper.set_update_hook(None) + return super(CSqliteExtDatabase, self)._close(conn) + + def _add_conn_hooks(self, conn): + super(CSqliteExtDatabase, self)._add_conn_hooks(conn) + self._conn_helper = ConnectionHelper(conn) + if self._commit_hook is not None: + self._conn_helper.set_commit_hook(self._commit_hook) + 
if self._rollback_hook is not None: + self._conn_helper.set_rollback_hook(self._rollback_hook) + if self._update_hook is not None: + self._conn_helper.set_update_hook(self._update_hook) + if self._replace_busy_handler: + timeout = self._timeout or 5 + self._conn_helper.set_busy_handler(timeout * 1000) + + def on_commit(self, fn): + self._commit_hook = fn + if not self.is_closed(): + self._conn_helper.set_commit_hook(fn) + return fn + + def on_rollback(self, fn): + self._rollback_hook = fn + if not self.is_closed(): + self._conn_helper.set_rollback_hook(fn) + return fn + + def on_update(self, fn): + self._update_hook = fn + if not self.is_closed(): + self._conn_helper.set_update_hook(fn) + return fn + + def changes(self): + return self._conn_helper.changes() + + @property + def last_insert_rowid(self): + return self._conn_helper.last_insert_rowid() + + @property + def autocommit(self): + return self._conn_helper.autocommit() + + def backup(self, destination, pages=None, name=None, progress=None): + return backup(self.connection(), destination.connection(), + pages=pages, name=name, progress=progress) + + def backup_to_file(self, filename, pages=None, name=None, + progress=None): + return backup_to_file(self.connection(), filename, pages=pages, + name=name, progress=progress) + + def blob_open(self, table, column, rowid, read_only=False): + return Blob(self, table, column, rowid, read_only) + + # Status properties. 
+ memory_used = __status__(SQLITE_STATUS_MEMORY_USED) + malloc_size = __status__(SQLITE_STATUS_MALLOC_SIZE, True) + malloc_count = __status__(SQLITE_STATUS_MALLOC_COUNT) + pagecache_used = __status__(SQLITE_STATUS_PAGECACHE_USED) + pagecache_overflow = __status__(SQLITE_STATUS_PAGECACHE_OVERFLOW) + pagecache_size = __status__(SQLITE_STATUS_PAGECACHE_SIZE, True) + scratch_used = __status__(SQLITE_STATUS_SCRATCH_USED) + scratch_overflow = __status__(SQLITE_STATUS_SCRATCH_OVERFLOW) + scratch_size = __status__(SQLITE_STATUS_SCRATCH_SIZE, True) + + # Connection status properties. + lookaside_used = __dbstatus__(SQLITE_DBSTATUS_LOOKASIDE_USED) + lookaside_hit = __dbstatus__(SQLITE_DBSTATUS_LOOKASIDE_HIT, True) + lookaside_miss = __dbstatus__(SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE, + True) + lookaside_miss_full = __dbstatus__(SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL, + True) + cache_used = __dbstatus__(SQLITE_DBSTATUS_CACHE_USED, False, True) + #cache_used_shared = __dbstatus__(SQLITE_DBSTATUS_CACHE_USED_SHARED, + # False, True) + schema_used = __dbstatus__(SQLITE_DBSTATUS_SCHEMA_USED, False, True) + statement_used = __dbstatus__(SQLITE_DBSTATUS_STMT_USED, False, True) + cache_hit = __dbstatus__(SQLITE_DBSTATUS_CACHE_HIT, False, True) + cache_miss = __dbstatus__(SQLITE_DBSTATUS_CACHE_MISS, False, True) + cache_write = __dbstatus__(SQLITE_DBSTATUS_CACHE_WRITE, False, True) + + +def match(lhs, rhs): + return Expression(lhs, OP.MATCH, rhs) + +def _parse_match_info(buf): + # See http://sqlite.org/fts3.html#matchinfo + bufsize = len(buf) # Length in bytes. + return [struct.unpack('@I', buf[i:i+4])[0] for i in range(0, bufsize, 4)] + +def get_weights(ncol, raw_weights): + if not raw_weights: + return [1] * ncol + else: + weights = [0] * ncol + for i, weight in enumerate(raw_weights): + weights[i] = weight + return weights + +# Ranking implementation, which parse matchinfo. 
+def rank(raw_match_info, *raw_weights): + # Handle match_info called w/default args 'pcx' - based on the example rank + # function http://sqlite.org/fts3.html#appendix_a + match_info = _parse_match_info(raw_match_info) + score = 0.0 + + p, c = match_info[:2] + weights = get_weights(c, raw_weights) + + # matchinfo X value corresponds to, for each phrase in the search query, a + # list of 3 values for each column in the search table. + # So if we have a two-phrase search query and three columns of data, the + # following would be the layout: + # p0 : c0=[0, 1, 2], c1=[3, 4, 5], c2=[6, 7, 8] + # p1 : c0=[9, 10, 11], c1=[12, 13, 14], c2=[15, 16, 17] + for phrase_num in range(p): + phrase_info_idx = 2 + (phrase_num * c * 3) + for col_num in range(c): + weight = weights[col_num] + if not weight: + continue + + col_idx = phrase_info_idx + (col_num * 3) + + # The idea is that we count the number of times the phrase appears + # in this column of the current row, compared to how many times it + # appears in this column across all rows. The ratio of these values + # provides a rough way to score based on "high value" terms. + row_hits = match_info[col_idx] + all_rows_hits = match_info[col_idx + 1] + if row_hits > 0: + score += weight * (float(row_hits) / all_rows_hits) + + return -score + +# Okapi BM25 ranking implementation (FTS4 only). +def bm25(raw_match_info, *args): + """ + Usage: + + # Format string *must* be pcnalx + # Second parameter to bm25 specifies the index of the column, on + # the table being queries. + bm25(matchinfo(document_tbl, 'pcnalx'), 1) AS rank + """ + match_info = _parse_match_info(raw_match_info) + K = 1.2 + B = 0.75 + score = 0.0 + + P_O, C_O, N_O, A_O = range(4) # Offsets into the matchinfo buffer. + term_count = match_info[P_O] # n + col_count = match_info[C_O] + total_docs = match_info[N_O] # N + L_O = A_O + col_count + X_O = L_O + col_count + + # Worked example of pcnalx for two columns and two phrases, 100 docs total. 
+ # { + # p = 2 + # c = 2 + # n = 100 + # a0 = 4 -- avg number of tokens for col0, e.g. title + # a1 = 40 -- avg number of tokens for col1, e.g. body + # l0 = 5 -- curr doc has 5 tokens in col0 + # l1 = 30 -- curr doc has 30 tokens in col1 + # + # x000 -- hits this row for phrase0, col0 + # x001 -- hits all rows for phrase0, col0 + # x002 -- rows with phrase0 in col0 at least once + # + # x010 -- hits this row for phrase0, col1 + # x011 -- hits all rows for phrase0, col1 + # x012 -- rows with phrase0 in col1 at least once + # + # x100 -- hits this row for phrase1, col0 + # x101 -- hits all rows for phrase1, col0 + # x102 -- rows with phrase1 in col0 at least once + # + # x110 -- hits this row for phrase1, col1 + # x111 -- hits all rows for phrase1, col1 + # x112 -- rows with phrase1 in col1 at least once + # } + + weights = get_weights(col_count, args) + + for i in range(term_count): + for j in range(col_count): + weight = weights[j] + if weight == 0: + continue + + x = X_O + (3 * (j + i * col_count)) + term_frequency = float(match_info[x]) # f(qi, D) + docs_with_term = float(match_info[x + 2]) # n(qi) + + # log( (N - n(qi) + 0.5) / (n(qi) + 0.5) ) + idf = math.log( + (total_docs - docs_with_term + 0.5) / + (docs_with_term + 0.5)) + if idf <= 0.0: + idf = 1e-6 + + doc_length = float(match_info[L_O + j]) # |D| + avg_length = float(match_info[A_O + j]) or 1. # avgdl + ratio = doc_length / avg_length + + num = term_frequency * (K + 1.0) + b_part = 1.0 - B + (B * ratio) + denom = term_frequency + (K * b_part) + + pc_score = idf * (num / denom) + score += (pc_score * weight) + + return -score + + +def _json_contains(src_json, obj_json): + stack = [] + try: + stack.append((json.loads(obj_json), json.loads(src_json))) + except: + # Invalid JSON! 
+ return False + + while stack: + obj, src = stack.pop() + if isinstance(src, dict): + if isinstance(obj, dict): + for key in obj: + if key not in src: + return False + stack.append((obj[key], src[key])) + elif isinstance(obj, list): + for item in obj: + if item not in src: + return False + elif obj not in src: + return False + elif isinstance(src, list): + if isinstance(obj, dict): + return False + elif isinstance(obj, list): + try: + for i in range(len(obj)): + stack.append((obj[i], src[i])) + except IndexError: + return False + elif obj not in src: + return False + elif obj != src: + return False + return True diff --git a/python3.9libs/playhouse/sqlite_udf.py b/python3.9libs/playhouse/sqlite_udf.py new file mode 100644 index 0000000..050dc9b --- /dev/null +++ b/python3.9libs/playhouse/sqlite_udf.py @@ -0,0 +1,536 @@ +import datetime +import hashlib +import heapq +import math +import os +import random +import re +import sys +import threading +import zlib +try: + from collections import Counter +except ImportError: + Counter = None +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse + +try: + from playhouse._sqlite_ext import TableFunction +except ImportError: + TableFunction = None + + +SQLITE_DATETIME_FORMATS = ( + '%Y-%m-%d %H:%M:%S', + '%Y-%m-%d %H:%M:%S.%f', + '%Y-%m-%d', + '%H:%M:%S', + '%H:%M:%S.%f', + '%H:%M') + +from peewee import format_date_time + +def format_date_time_sqlite(date_value): + return format_date_time(date_value, SQLITE_DATETIME_FORMATS) + +try: + from playhouse import _sqlite_udf as cython_udf +except ImportError: + cython_udf = None + + +# Group udf by function. 
# Group identifiers used to tag and look up user-defined functions.
CONTROL_FLOW = 'control_flow'
DATE = 'date'
FILE = 'file'
HELPER = 'helpers'
MATH = 'math'
STRING = 'string'

# Registries mapping group name -> list of collected callables/classes.
AGGREGATE_COLLECTION = {}
TABLE_FUNCTION_COLLECTION = {}
UDF_COLLECTION = {}


class synchronized_dict(dict):
    """Dict subclass whose item get/set/delete are serialized through a
    per-instance lock, so concurrent SQLite callbacks do not interleave."""
    def __init__(self, *args, **kwargs):
        super(synchronized_dict, self).__init__(*args, **kwargs)
        self._lock = threading.Lock()

    def __getitem__(self, key):
        with self._lock:
            return dict.__getitem__(self, key)

    def __setitem__(self, key, value):
        with self._lock:
            return dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        with self._lock:
            return dict.__delitem__(self, key)


# Shared mutable state used by the toggle()/setting() helper UDFs.
STATE = synchronized_dict()
SETTINGS = synchronized_dict()

# Class and function decorators.
def aggregate(*groups):
    """Class decorator: file an aggregate under each named group."""
    def decorator(klass):
        for group in groups:
            AGGREGATE_COLLECTION.setdefault(group, []).append(klass)
        return klass
    return decorator

def table_function(*groups):
    """Class decorator: file a table function under each named group."""
    def decorator(klass):
        for group in groups:
            TABLE_FUNCTION_COLLECTION.setdefault(group, []).append(klass)
        return klass
    return decorator

def udf(*groups):
    """Function decorator: file a scalar UDF under each named group."""
    def decorator(fn):
        for group in groups:
            UDF_COLLECTION.setdefault(group, []).append(fn)
        return fn
    return decorator

# Register aggregates / functions with connection.
def register_aggregate_groups(db, *groups):
    """Register each collected aggregate for the given groups with ``db``,
    skipping any name that was already registered in this call."""
    registered = set()
    for group in groups:
        for klass in AGGREGATE_COLLECTION.get(group, ()):
            name = getattr(klass, 'name', klass.__name__)
            if name in registered:
                continue
            registered.add(name)
            db.register_aggregate(klass, name)

def register_table_function_groups(db, *groups):
    """Register each collected table function for the given groups with
    ``db``, de-duplicated by the class's ``name`` attribute."""
    registered = set()
    for group in groups:
        for klass in TABLE_FUNCTION_COLLECTION.get(group, ()):
            if klass.name in registered:
                continue
            registered.add(klass.name)
            db.register_table_function(klass)

def register_udf_groups(db, *groups):
    """Register each collected scalar UDF for the given groups with ``db``,
    de-duplicated by function name."""
    registered = set()
    for group in groups:
        for function in UDF_COLLECTION.get(group, ()):
            name = function.__name__
            if name in registered:
                continue
            registered.add(name)
            db.register_function(function, name)

def register_groups(db, *groups):
    """Register aggregates, table functions and UDFs for the given groups."""
    register_aggregate_groups(db, *groups)
    register_table_function_groups(db, *groups)
    register_udf_groups(db, *groups)

def register_all(db):
    """Register every collected aggregate, table function and UDF."""
    register_aggregate_groups(db, *AGGREGATE_COLLECTION)
    register_table_function_groups(db, *TABLE_FUNCTION_COLLECTION)
    register_udf_groups(db, *UDF_COLLECTION)


# Begin actual user-defined functions and aggregates.

# Scalar functions.
+@udf(CONTROL_FLOW) +def if_then_else(cond, truthy, falsey=None): + if cond: + return truthy + return falsey + +@udf(DATE) +def strip_tz(date_str): + date_str = date_str.replace('T', ' ') + tz_idx1 = date_str.find('+') + if tz_idx1 != -1: + return date_str[:tz_idx1] + tz_idx2 = date_str.find('-') + if tz_idx2 > 13: + return date_str[:tz_idx2] + return date_str + +@udf(DATE) +def human_delta(nseconds, glue=', '): + parts = ( + (86400 * 365, 'year'), + (86400 * 30, 'month'), + (86400 * 7, 'week'), + (86400, 'day'), + (3600, 'hour'), + (60, 'minute'), + (1, 'second'), + ) + accum = [] + for offset, name in parts: + val, nseconds = divmod(nseconds, offset) + if val: + suffix = val != 1 and 's' or '' + accum.append('%s %s%s' % (val, name, suffix)) + if not accum: + return '0 seconds' + return glue.join(accum) + +@udf(FILE) +def file_ext(filename): + try: + res = os.path.splitext(filename) + except ValueError: + return None + return res[1] + +@udf(FILE) +def file_read(filename): + try: + with open(filename) as fh: + return fh.read() + except: + pass + +if sys.version_info[0] == 2: + @udf(HELPER) + def gzip(data, compression=9): + return buffer(zlib.compress(data, compression)) + + @udf(HELPER) + def gunzip(data): + return zlib.decompress(data) +else: + @udf(HELPER) + def gzip(data, compression=9): + if isinstance(data, str): + data = bytes(data.encode('raw_unicode_escape')) + return zlib.compress(data, compression) + + @udf(HELPER) + def gunzip(data): + return zlib.decompress(data) + +@udf(HELPER) +def hostname(url): + parse_result = urlparse(url) + if parse_result: + return parse_result.netloc + +@udf(HELPER) +def toggle(key): + key = key.lower() + STATE[key] = ret = not STATE.get(key) + return ret + +@udf(HELPER) +def setting(key, value=None): + if value is None: + return SETTINGS.get(key) + else: + SETTINGS[key] = value + return value + +@udf(HELPER) +def clear_settings(): + SETTINGS.clear() + +@udf(HELPER) +def clear_toggles(): + STATE.clear() + +@udf(MATH) +def 
randomrange(start, end=None, step=None): + if end is None: + start, end = 0, start + elif step is None: + step = 1 + return random.randrange(start, end, step) + +@udf(MATH) +def gauss_distribution(mean, sigma): + try: + return random.gauss(mean, sigma) + except ValueError: + return None + +@udf(MATH) +def sqrt(n): + try: + return math.sqrt(n) + except ValueError: + return None + +@udf(MATH) +def tonumber(s): + try: + return int(s) + except ValueError: + try: + return float(s) + except: + return None + +@udf(STRING) +def substr_count(haystack, needle): + if not haystack or not needle: + return 0 + return haystack.count(needle) + +@udf(STRING) +def strip_chars(haystack, chars): + return haystack.strip(chars) + +def _hash(constructor, *args): + hash_obj = constructor() + for arg in args: + hash_obj.update(arg) + return hash_obj.hexdigest() + +# Aggregates. +class _heap_agg(object): + def __init__(self): + self.heap = [] + self.ct = 0 + + def process(self, value): + return value + + def step(self, value): + self.ct += 1 + heapq.heappush(self.heap, self.process(value)) + +class _datetime_heap_agg(_heap_agg): + def process(self, value): + return format_date_time_sqlite(value) + +if sys.version_info[:2] == (2, 6): + def total_seconds(td): + return (td.seconds + + (td.days * 86400) + + (td.microseconds / (10.**6))) +else: + total_seconds = lambda td: td.total_seconds() + +@aggregate(DATE) +class mintdiff(_datetime_heap_agg): + def finalize(self): + dtp = min_diff = None + while self.heap: + if min_diff is None: + if dtp is None: + dtp = heapq.heappop(self.heap) + continue + dt = heapq.heappop(self.heap) + diff = dt - dtp + if min_diff is None or min_diff > diff: + min_diff = diff + dtp = dt + if min_diff is not None: + return total_seconds(min_diff) + +@aggregate(DATE) +class avgtdiff(_datetime_heap_agg): + def finalize(self): + if self.ct < 1: + return + elif self.ct == 1: + return 0 + + total = ct = 0 + dtp = None + while self.heap: + if total == 0: + if dtp is None: + 
dtp = heapq.heappop(self.heap) + continue + + dt = heapq.heappop(self.heap) + diff = dt - dtp + ct += 1 + total += total_seconds(diff) + dtp = dt + + return float(total) / ct + +@aggregate(DATE) +class duration(object): + def __init__(self): + self._min = self._max = None + + def step(self, value): + dt = format_date_time_sqlite(value) + if self._min is None or dt < self._min: + self._min = dt + if self._max is None or dt > self._max: + self._max = dt + + def finalize(self): + if self._min and self._max: + td = (self._max - self._min) + return total_seconds(td) + return None + +@aggregate(MATH) +class mode(object): + if Counter: + def __init__(self): + self.items = Counter() + + def step(self, *args): + self.items.update(args) + + def finalize(self): + if self.items: + return self.items.most_common(1)[0][0] + else: + def __init__(self): + self.items = [] + + def step(self, item): + self.items.append(item) + + def finalize(self): + if self.items: + return max(set(self.items), key=self.items.count) + +@aggregate(MATH) +class minrange(_heap_agg): + def finalize(self): + if self.ct == 0: + return + elif self.ct == 1: + return 0 + + prev = min_diff = None + + while self.heap: + if min_diff is None: + if prev is None: + prev = heapq.heappop(self.heap) + continue + curr = heapq.heappop(self.heap) + diff = curr - prev + if min_diff is None or min_diff > diff: + min_diff = diff + prev = curr + return min_diff + +@aggregate(MATH) +class avgrange(_heap_agg): + def finalize(self): + if self.ct == 0: + return + elif self.ct == 1: + return 0 + + total = ct = 0 + prev = None + while self.heap: + if total == 0: + if prev is None: + prev = heapq.heappop(self.heap) + continue + + curr = heapq.heappop(self.heap) + diff = curr - prev + ct += 1 + total += diff + prev = curr + + return float(total) / ct + +@aggregate(MATH) +class _range(object): + name = 'range' + + def __init__(self): + self._min = self._max = None + + def step(self, value): + if self._min is None or value < self._min: 
+ self._min = value + if self._max is None or value > self._max: + self._max = value + + def finalize(self): + if self._min is not None and self._max is not None: + return self._max - self._min + return None + +@aggregate(MATH) +class stddev(object): + def __init__(self): + self.n = 0 + self.values = [] + def step(self, v): + self.n += 1 + self.values.append(v) + def finalize(self): + if self.n <= 1: + return 0 + mean = sum(self.values) / self.n + return math.sqrt(sum((i - mean) ** 2 for i in self.values) / (self.n - 1)) + + +if cython_udf is not None: + damerau_levenshtein_dist = udf(STRING)(cython_udf.damerau_levenshtein_dist) + levenshtein_dist = udf(STRING)(cython_udf.levenshtein_dist) + str_dist = udf(STRING)(cython_udf.str_dist) + median = aggregate(MATH)(cython_udf.median) + + +if TableFunction is not None: + @table_function(STRING) + class RegexSearch(TableFunction): + params = ['regex', 'search_string'] + columns = ['match'] + name = 'regex_search' + + def initialize(self, regex=None, search_string=None): + self._iter = re.finditer(regex, search_string) + + def iterate(self, idx): + return (next(self._iter).group(0),) + + @table_function(DATE) + class DateSeries(TableFunction): + params = ['start', 'stop', 'step_seconds'] + columns = ['date'] + name = 'date_series' + + def initialize(self, start, stop, step_seconds=86400): + self.start = format_date_time_sqlite(start) + self.stop = format_date_time_sqlite(stop) + step_seconds = int(step_seconds) + self.step_seconds = datetime.timedelta(seconds=step_seconds) + + if (self.start.hour == 0 and + self.start.minute == 0 and + self.start.second == 0 and + step_seconds >= 86400): + self.format = '%Y-%m-%d' + elif (self.start.year == 1900 and + self.start.month == 1 and + self.start.day == 1 and + self.stop.year == 1900 and + self.stop.month == 1 and + self.stop.day == 1 and + step_seconds < 86400): + self.format = '%H:%M:%S' + else: + self.format = '%Y-%m-%d %H:%M:%S' + + def iterate(self, idx): + if self.start > 
self.stop: + raise StopIteration + current = self.start + self.start += self.step_seconds + return (current.strftime(self.format),) diff --git a/python3.9libs/playhouse/sqliteq.py b/python3.9libs/playhouse/sqliteq.py new file mode 100644 index 0000000..e001587 --- /dev/null +++ b/python3.9libs/playhouse/sqliteq.py @@ -0,0 +1,331 @@ +import logging +import weakref +from threading import local as thread_local +from threading import Event +from threading import Thread +try: + from Queue import Queue +except ImportError: + from queue import Queue + +try: + import gevent + from gevent import Greenlet as GThread + from gevent.event import Event as GEvent + from gevent.local import local as greenlet_local + from gevent.queue import Queue as GQueue +except ImportError: + GThread = GQueue = GEvent = None + +from peewee import SENTINEL +from playhouse.sqlite_ext import SqliteExtDatabase + + +logger = logging.getLogger('peewee.sqliteq') + + +class ResultTimeout(Exception): + pass + +class WriterPaused(Exception): + pass + +class ShutdownException(Exception): + pass + + +class AsyncCursor(object): + __slots__ = ('sql', 'params', 'commit', 'timeout', + '_event', '_cursor', '_exc', '_idx', '_rows', '_ready') + + def __init__(self, event, sql, params, commit, timeout): + self._event = event + self.sql = sql + self.params = params + self.commit = commit + self.timeout = timeout + self._cursor = self._exc = self._idx = self._rows = None + self._ready = False + + def set_result(self, cursor, exc=None): + self._cursor = cursor + self._exc = exc + self._idx = 0 + self._rows = cursor.fetchall() if exc is None else [] + self._event.set() + return self + + def _wait(self, timeout=None): + timeout = timeout if timeout is not None else self.timeout + if not self._event.wait(timeout=timeout) and timeout: + raise ResultTimeout('results not ready, timed out.') + if self._exc is not None: + raise self._exc + self._ready = True + + def __iter__(self): + if not self._ready: + self._wait() + if 
# ===== python3.9libs/playhouse/sqliteq.py (continued; vendored peewee 3.14.8) =====
# NOTE(review): vendored upstream code -- only comments/docstrings are added
# here so this copy does not diverge from upstream peewee.

        # (Tail of an AsyncCursor method begun before this chunk -- presumably
        # __iter__: re-raise any exception captured by the writer thread, then
        # hand back self as the iterator.  TODO confirm against upstream.)
        if self._exc is not None:
            raise self._exc
        return self

    def next(self):
        # Block until the writer thread has populated self._rows, then yield
        # one row per call, tracking position with self._idx.
        if not self._ready:
            self._wait()
        try:
            obj = self._rows[self._idx]
        except IndexError:
            raise StopIteration
        else:
            self._idx += 1
            return obj
    __next__ = next  # py2/py3 iterator protocol compatibility.

    @property
    def lastrowid(self):
        # Wait for execution so the underlying cursor attribute is valid.
        if not self._ready:
            self._wait()
        return self._cursor.lastrowid

    @property
    def rowcount(self):
        # Wait for execution so the underlying cursor attribute is valid.
        if not self._ready:
            self._wait()
        return self._cursor.rowcount

    @property
    def description(self):
        return self._cursor.description

    def close(self):
        self._cursor.close()

    def fetchall(self):
        return list(self)  # Iterating implies waiting until populated.

    def fetchone(self):
        # DB-API style single-row fetch; None (rather than StopIteration)
        # signals exhaustion.
        if not self._ready:
            self._wait()
        try:
            return next(self)
        except StopIteration:
            return None


# Sentinel objects placed on the write queue to control the writer thread.
# SHUTDOWN deliberately reuses the StopIteration type as a unique marker.
SHUTDOWN = StopIteration
PAUSE = object()
UNPAUSE = object()


class Writer(object):
    """Consumes AsyncCursor objects from the write queue and executes them
    serially on a single connection, implementing the pause/unpause/shutdown
    control protocol."""

    __slots__ = ('database', 'queue')

    def __init__(self, database, queue):
        self.database = database
        self.queue = queue

    def run(self):
        # Main loop of the writer thread.  ``conn is None`` encodes the
        # paused state; the connection is always closed on the way out.
        conn = self.database.connection()
        try:
            while True:
                try:
                    if conn is None:  # Paused.
                        if self.wait_unpause():
                            conn = self.database.connection()
                    else:
                        conn = self.loop(conn)
                except ShutdownException:
                    logger.info('writer received shutdown request, exiting.')
                    return
        finally:
            if conn is not None:
                self.database._close(conn)
                self.database._state.reset()

    def wait_unpause(self):
        # Blocks while paused.  Returns True when an UNPAUSE token arrives;
        # queries received while paused are failed with WriterPaused.
        obj = self.queue.get()
        if obj is UNPAUSE:
            logger.info('writer unpaused - reconnecting to database.')
            return True
        elif obj is SHUTDOWN:
            raise ShutdownException()
        elif obj is PAUSE:
            logger.error('writer received pause, but is already paused.')
        else:
            obj.set_result(None, WriterPaused())
            logger.warning('writer paused, not handling %s', obj)

    def loop(self, conn):
        # One iteration of the running state: execute a query or react to a
        # control token.  Returns the (possibly None-ed) connection.
        obj = self.queue.get()
        if isinstance(obj, AsyncCursor):
            self.execute(obj)
        elif obj is PAUSE:
            logger.info('writer paused - closing database connection.')
            self.database._close(conn)
            self.database._state.reset()
            return
        elif obj is UNPAUSE:
            logger.error('writer received unpause, but is already running.')
        elif obj is SHUTDOWN:
            raise ShutdownException()
        else:
            logger.error('writer received unsupported object: %s', obj)
        return conn

    def execute(self, obj):
        # Run one queued query and publish (cursor, exc) to the waiting
        # AsyncCursor.
        logger.debug('received query %s', obj.sql)
        try:
            cursor = self.database._execute(obj.sql, obj.params, obj.commit)
        except Exception as execute_err:
            cursor = None
            exc = execute_err  # Re-bind: the 'as' name is cleared when the
                               # except block exits in python 3.
        else:
            exc = None
        return obj.set_result(cursor, exc)


class SqliteQueueDatabase(SqliteExtDatabase):
    """SQLite database that serializes all writes through a dedicated writer
    thread (or greenlet), while reads execute directly on the caller's
    connection.  Requires WAL journal mode."""

    WAL_MODE_ERROR_MESSAGE = ('SQLite must be configured to use the WAL '
                              'journal mode when using this feature. WAL mode '
                              'allows one or more readers to continue reading '
                              'while another connection writes to the '
                              'database.')

    def __init__(self, database, use_gevent=False, autostart=True,
                 queue_max_size=None, results_timeout=None, *args, **kwargs):
        kwargs['check_same_thread'] = False

        # Ensure that journal_mode is WAL. This value is passed to the parent
        # class constructor below.
        pragmas = self._validate_journal_mode(kwargs.pop('pragmas', None))

        # Reference to execute_sql on the parent class. Since we've overridden
        # execute_sql(), this is just a handy way to reference the real
        # implementation.
        Parent = super(SqliteQueueDatabase, self)
        self._execute = Parent.execute_sql

        # Call the parent class constructor with our modified pragmas.
        Parent.__init__(database, pragmas=pragmas, *args, **kwargs)

        self._autostart = autostart
        self._results_timeout = results_timeout
        self._is_stopped = True

        # Get different objects depending on the threading implementation.
        self._thread_helper = self.get_thread_impl(use_gevent)(queue_max_size)

        # Create the writer thread, optionally starting it.
        self._create_write_queue()
        if self._autostart:
            self.start()

    def get_thread_impl(self, use_gevent):
        # Factory for the concurrency primitives (thread vs greenlet).
        return GreenletHelper if use_gevent else ThreadHelper

    def _validate_journal_mode(self, pragmas=None):
        # Normalize user-supplied pragmas and force journal_mode=wal; an
        # explicit non-WAL journal_mode is a hard error.
        if not pragmas:
            return {'journal_mode': 'wal'}

        if not isinstance(pragmas, dict):
            pragmas = dict((k.lower(), v) for (k, v) in pragmas)
        if pragmas.get('journal_mode', 'wal').lower() != 'wal':
            raise ValueError(self.WAL_MODE_ERROR_MESSAGE)

        pragmas['journal_mode'] = 'wal'
        return pragmas

    def _create_write_queue(self):
        self._write_queue = self._thread_helper.queue()

    def queue_size(self):
        # Number of pending writes (approximate, per Queue.qsize semantics).
        return self._write_queue.qsize()

    def execute_sql(self, sql, params=None, commit=SENTINEL, timeout=None):
        # Reads run inline; writes are wrapped in an AsyncCursor and handed
        # to the writer thread via the queue.
        if commit is SENTINEL:
            # NOTE(review): a leading-whitespace or CTE ("WITH ...") query is
            # classified as a write here -- matches upstream behavior.
            commit = not sql.lower().startswith('select')

        if not commit:
            return self._execute(sql, params, commit=commit)

        cursor = AsyncCursor(
            event=self._thread_helper.event(),
            sql=sql,
            params=params,
            commit=commit,
            timeout=self._results_timeout if timeout is None else timeout)
        self._write_queue.put(cursor)
        return cursor

    def start(self):
        # Spawn the writer thread; idempotent (returns False if running).
        # self._lock is provided by the peewee Database base class.
        with self._lock:
            if not self._is_stopped:
                return False

            def run():
                writer = Writer(self, self._write_queue)
                writer.run()

            self._writer = self._thread_helper.thread(run)
            self._writer.start()
            self._is_stopped = False
            return True

    def stop(self):
        # Request shutdown and join the writer; pending queued writes are
        # drained before the SHUTDOWN token is reached.
        logger.debug('environment stop requested.')
        with self._lock:
            if self._is_stopped:
                return False
            self._write_queue.put(SHUTDOWN)
            self._writer.join()
            self._is_stopped = True
            return True

    def is_stopped(self):
        with self._lock:
            return self._is_stopped

    def pause(self):
        with self._lock:
            self._write_queue.put(PAUSE)

    def unpause(self):
        with self._lock:
            self._write_queue.put(UNPAUSE)

    def __unsupported__(self, *args, **kwargs):
        # Transactions cannot span the caller/writer thread boundary.
        raise ValueError('This method is not supported by %r.' % type(self))
    atomic = transaction = savepoint = __unsupported__


class ThreadHelper(object):
    """Provides Event/Queue/Thread primitives backed by the threading module."""

    __slots__ = ('queue_max_size',)

    def __init__(self, queue_max_size=None):
        self.queue_max_size = queue_max_size

    def event(self): return Event()

    def queue(self, max_size=None):
        max_size = max_size if max_size is not None else self.queue_max_size
        return Queue(maxsize=max_size or 0)

    def thread(self, fn, *args, **kwargs):
        # Daemon thread so a forgotten stop() does not block interpreter exit.
        thread = Thread(target=fn, args=args, kwargs=kwargs)
        thread.daemon = True
        return thread


class GreenletHelper(ThreadHelper):
    """Same interface as ThreadHelper, backed by gevent primitives."""

    __slots__ = ()

    def event(self): return GEvent()

    def queue(self, max_size=None):
        max_size = max_size if max_size is not None else self.queue_max_size
        return GQueue(maxsize=max_size or 0)

    def thread(self, fn, *args, **kwargs):
        def wrap(*a, **k):
            gevent.sleep()  # yield control before the writer starts looping.
            return fn(*a, **k)
        return GThread(wrap, *args, **kwargs)


# ===== python3.9libs/playhouse/test_utils.py (vendored peewee 3.14.8) =====
from functools import wraps
import logging


logger = logging.getLogger('peewee')


class _QueryLogHandler(logging.Handler):
    """Logging handler that records every emitted peewee query record."""

    def __init__(self, *args, **kwargs):
        # List of logging.LogRecord objects captured while attached.
        self.queries = []
        logging.Handler.__init__(self, *args, **kwargs)

    def emit(self, record):
        self.queries.append(record)


class count_queries(object):
    """Context manager that counts queries executed inside the block by
    attaching a capturing handler to the 'peewee' logger."""

    def __init__(self, only_select=False):
        self.only_select = only_select
        self.count = 0

    def get_queries(self):
        return self._handler.queries

    def __enter__(self):
        self._handler = _QueryLogHandler()
        logger.setLevel(logging.DEBUG)
        logger.addHandler(self._handler)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        logger.removeHandler(self._handler)
        if self.only_select:
            # record.msg is (sql, params); inspect the SQL text only.
            self.count = len([q for q in self._handler.queries
                              if q.msg[0].startswith('SELECT ')])
        else:
            self.count = len(self._handler.queries)


class assert_query_count(count_queries):
    """count_queries variant that asserts exactly ``expected`` queries ran;
    usable as a context manager or as a decorator."""

    def __init__(self, expected, only_select=False):
        super(assert_query_count, self).__init__(only_select=only_select)
        self.expected = expected

    def __call__(self, f):
        @wraps(f)
        def decorated(*args, **kwds):
            with self:
                ret = f(*args, **kwds)

            self._assert_count()
            return ret

        return decorated

    def _assert_count(self):
        error_msg = '%s != %s' % (self.count, self.expected)
        assert self.count == self.expected, error_msg

    def __exit__(self, exc_type, exc_val, exc_tb):
        super(assert_query_count, self).__exit__(exc_type, exc_val, exc_tb)
        self._assert_count()


# ===== python3.9libs/pythonrc.py (Houdini startup hook; project-authored) =====
from __future__ import print_function
from __future__ import absolute_import

import inspect
import os

from searcher import searchersetup

# info
__author__ = "instance.id"
__copyright__ = "2020 All rights reserved."
__status__ = "Release Candidate"

# current_file_path = os.path.abspath(
#     inspect.getsourcefile(lambda: 0)
# )


def main():
    # Entry point invoked by Houdini when it sources pythonrc.py; delegates
    # all setup work to the Searcher package.
    searchersetup.main()


if __name__ == '__main__':
    main()


# ===== python3.9libs/qtpy/Qt3DAnimation.py (vendored QtPy 1.9.0) =====
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides Qt3DAnimation classes and functions."""

# Local imports
from . import PYQT5, PYSIDE2, PythonQtError, PYSIDE_VERSION
from .py3compat import PY2

if PYQT5:
    from PyQt5.Qt3DAnimation import *
elif PYSIDE2:
    if not PY2 or (PY2 and PYSIDE_VERSION < '5.12.4'):
        # Wildcard import is broken on affected PySide2 builds; copy members
        # into this module's namespace instead.
        # https://bugreports.qt.io/projects/PYSIDE/issues/PYSIDE-1026
        import PySide2.Qt3DAnimation as __temp
        import inspect
        for __name in inspect.getmembers(__temp.Qt3DAnimation):
            globals()[__name[0]] = __name[1]
    else:
        raise PythonQtError('A bug in Shiboken prevents this')
else:
    raise PythonQtError('No Qt bindings could be found')


# ===== python3.9libs/qtpy/Qt3DCore.py (vendored QtPy 1.9.0) =====
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides Qt3DCore classes and functions."""

# Local imports
from . import PYQT5, PYSIDE2, PythonQtError, PYSIDE_VERSION
from .py3compat import PY2

if PYQT5:
    from PyQt5.Qt3DCore import *
elif PYSIDE2:
    if not PY2 or (PY2 and PYSIDE_VERSION < '5.12.4'):
        # https://bugreports.qt.io/projects/PYSIDE/issues/PYSIDE-1026
        import PySide2.Qt3DCore as __temp
        import inspect
        for __name in inspect.getmembers(__temp.Qt3DCore):
            globals()[__name[0]] = __name[1]
    else:
        raise PythonQtError('A bug in Shiboken prevents this')
else:
    raise PythonQtError('No Qt bindings could be found')


# ===== python3.9libs/qtpy/Qt3DExtras.py (vendored QtPy 1.9.0) =====
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides Qt3DExtras classes and functions."""

# Local imports
from . import PYQT5, PYSIDE2, PythonQtError, PYSIDE_VERSION
from .py3compat import PY2

if PYQT5:
    from PyQt5.Qt3DExtras import *
elif PYSIDE2:
    if not PY2 or (PY2 and PYSIDE_VERSION < '5.12.4'):
        # https://bugreports.qt.io/projects/PYSIDE/issues/PYSIDE-1026
        import PySide2.Qt3DExtras as __temp
        import inspect
        for __name in inspect.getmembers(__temp.Qt3DExtras):
            globals()[__name[0]] = __name[1]
    else:
        raise PythonQtError('A bug in Shiboken prevents this')
else:
    raise PythonQtError('No Qt bindings could be found')


# ===== python3.9libs/qtpy/Qt3DInput.py (vendored QtPy 1.9.0) =====
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides Qt3DInput classes and functions."""

# Local imports
from . import PYQT5, PYSIDE2, PythonQtError, PYSIDE_VERSION
from .py3compat import PY2

if PYQT5:
    from PyQt5.Qt3DInput import *
elif PYSIDE2:
    if not PY2 or (PY2 and PYSIDE_VERSION < '5.12.4'):
        # https://bugreports.qt.io/projects/PYSIDE/issues/PYSIDE-1026
        import PySide2.Qt3DInput as __temp
        import inspect
        for __name in inspect.getmembers(__temp.Qt3DInput):
            globals()[__name[0]] = __name[1]
    else:
        raise PythonQtError('A bug in Shiboken prevents this')
else:
    raise PythonQtError('No Qt bindings could be found')


# ===== python3.9libs/qtpy/Qt3DLogic.py (vendored QtPy 1.9.0) =====
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides Qt3DLogic classes and functions."""

# Local imports
from . import PYQT5, PYSIDE2, PythonQtError, PYSIDE_VERSION
from .py3compat import PY2

if PYQT5:
    from PyQt5.Qt3DLogic import *
elif PYSIDE2:
    if not PY2 or (PY2 and PYSIDE_VERSION < '5.12.4'):
        # https://bugreports.qt.io/projects/PYSIDE/issues/PYSIDE-1026
        import PySide2.Qt3DLogic as __temp
        import inspect
        for __name in inspect.getmembers(__temp.Qt3DLogic):
            globals()[__name[0]] = __name[1]
    else:
        raise PythonQtError('A bug in Shiboken prevents this')
else:
    raise PythonQtError('No Qt bindings could be found')


# ===== python3.9libs/qtpy/Qt3DRender.py (vendored QtPy 1.9.0) =====
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides Qt3DRender classes and functions."""

# Local imports
from . import PYQT5, PYSIDE2, PythonQtError, PYSIDE_VERSION
from .py3compat import PY2

if PYQT5:
    from PyQt5.Qt3DRender import *
elif PYSIDE2:
    if not PY2 or (PY2 and PYSIDE_VERSION < '5.12.4'):
        # https://bugreports.qt.io/projects/PYSIDE/issues/PYSIDE-1026
        import PySide2.Qt3DRender as __temp
        import inspect
        for __name in inspect.getmembers(__temp.Qt3DRender):
            globals()[__name[0]] = __name[1]
    else:
        raise PythonQtError('A bug in Shiboken prevents this')
else:
    raise PythonQtError('No Qt bindings could be found')


# ===== python3.9libs/qtpy/QtCharts.py (vendored QtPy 1.9.0) =====
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2019- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtChart classes and functions."""

# Local imports
from . import PYQT5, PYSIDE2, PythonQtError

if PYQT5:
    try:
        # PyQt5 ships QtChart as a separately installed add-on module.
        from PyQt5 import QtChart as QtCharts
    except ImportError:
        raise PythonQtError('The QtChart module was not found. '
                            'It needs to be installed separately for PyQt5.')
elif PYSIDE2:
    from PySide2.QtCharts import *
else:
    raise PythonQtError('No Qt bindings could be found')


# ===== python3.9libs/qtpy/QtCore.py (vendored QtPy 1.9.0) =====
# -*- coding: utf-8 -*-
#
# Copyright © 2014-2015 Colin Duquesnoy
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)

"""
Provides QtCore classes and functions.
"""

from . import PYQT5, PYSIDE2, PYQT4, PYSIDE, PythonQtError


if PYQT5:
    from PyQt5.QtCore import *
    # Re-export PyQt's pyqt*-prefixed names under their PySide-style names.
    from PyQt5.QtCore import pyqtSignal as Signal
    from PyQt5.QtCore import pyqtSlot as Slot
    from PyQt5.QtCore import pyqtProperty as Property
    from PyQt5.QtCore import QT_VERSION_STR as __version__

    # For issue #153
    from PyQt5.QtCore import QDateTime
    QDateTime.toPython = QDateTime.toPyDateTime

    # Those are imported from `import *`
    del pyqtSignal, pyqtSlot, pyqtProperty, QT_VERSION_STR
elif PYSIDE2:
    from PySide2.QtCore import *

    try:  # may be limited to PySide-5.11a1 only
        from PySide2.QtGui import QStringListModel
    except:
        pass

    import PySide2.QtCore
    __version__ = PySide2.QtCore.__version__
elif PYQT4:
    from PyQt4.QtCore import *
    # Those are things we inherited from Spyder that fix crazy crashes under
    # some specific situations. (See #34)
    from PyQt4.QtCore import QCoreApplication
    from PyQt4.QtCore import Qt
    from PyQt4.QtCore import pyqtSignal as Signal
    from PyQt4.QtCore import pyqtSlot as Slot
    from PyQt4.QtCore import pyqtProperty as Property
    from PyQt4.QtGui import (QItemSelection, QItemSelectionModel,
                             QItemSelectionRange, QSortFilterProxyModel,
                             QStringListModel)
    from PyQt4.QtCore import QT_VERSION_STR as __version__
    from PyQt4.QtCore import qInstallMsgHandler as qInstallMessageHandler

    # QDesktopServices has been split into (QDesktopServices and
    # QStandardPaths) in Qt5
    # This creates a dummy class that emulates QStandardPaths
    from PyQt4.QtGui import QDesktopServices as _QDesktopServices

    class QStandardPaths():
        StandardLocation = _QDesktopServices.StandardLocation
        displayName = _QDesktopServices.displayName
        DesktopLocation = _QDesktopServices.DesktopLocation
        DocumentsLocation = _QDesktopServices.DocumentsLocation
        FontsLocation = _QDesktopServices.FontsLocation
        ApplicationsLocation = _QDesktopServices.ApplicationsLocation
        MusicLocation = _QDesktopServices.MusicLocation
        MoviesLocation = _QDesktopServices.MoviesLocation
        PicturesLocation = _QDesktopServices.PicturesLocation
        TempLocation = _QDesktopServices.TempLocation
        HomeLocation = _QDesktopServices.HomeLocation
        DataLocation = _QDesktopServices.DataLocation
        CacheLocation = _QDesktopServices.CacheLocation
        writableLocation = _QDesktopServices.storageLocation

    # Those are imported from `import *`
    del pyqtSignal, pyqtSlot, pyqtProperty, QT_VERSION_STR, qInstallMsgHandler
elif PYSIDE:
    from PySide.QtCore import *
    from PySide.QtGui import (QItemSelection, QItemSelectionModel,
                              QItemSelectionRange, QSortFilterProxyModel,
                              QStringListModel)
    from PySide.QtCore import qInstallMsgHandler as qInstallMessageHandler
    del qInstallMsgHandler

    # QDesktopServices has been split into (QDesktopServices and
    # QStandardPaths) in Qt5
    # This creates a dummy class that emulates QStandardPaths
    from PySide.QtGui import QDesktopServices as _QDesktopServices

    class QStandardPaths():
        StandardLocation = _QDesktopServices.StandardLocation
        displayName = _QDesktopServices.displayName
        DesktopLocation = _QDesktopServices.DesktopLocation
        DocumentsLocation = _QDesktopServices.DocumentsLocation
        FontsLocation = _QDesktopServices.FontsLocation
        ApplicationsLocation = _QDesktopServices.ApplicationsLocation
        MusicLocation = _QDesktopServices.MusicLocation
        MoviesLocation = _QDesktopServices.MoviesLocation
        PicturesLocation = _QDesktopServices.PicturesLocation
        TempLocation = _QDesktopServices.TempLocation
        HomeLocation = _QDesktopServices.HomeLocation
        DataLocation = _QDesktopServices.DataLocation
        CacheLocation = _QDesktopServices.CacheLocation
        writableLocation = _QDesktopServices.storageLocation

    import PySide.QtCore
    __version__ = PySide.QtCore.__version__
else:
    raise PythonQtError('No Qt bindings could be found')
0000000..cfb2b3b --- /dev/null +++ b/python3.9libs/qtpy/QtDatavisualization.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtDataVisualization classes and functions.""" + +# Local imports +from . import PYQT5, PYSIDE2, PythonQtError + +if PYQT5: + from PyQt5.QtDataVisualization import * +elif PYSIDE2: + # https://bugreports.qt.io/projects/PYSIDE/issues/PYSIDE-1026 + import PySide2.QtDataVisualization as __temp + import inspect + for __name in inspect.getmembers(__temp.QtDataVisualization): + globals()[__name[0]] = __name[1] +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtDesigner.py b/python3.9libs/qtpy/QtDesigner.py new file mode 100644 index 0000000..4aaafc8 --- /dev/null +++ b/python3.9libs/qtpy/QtDesigner.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2014-2015 Colin Duquesnoy +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +""" +Provides QtDesigner classes and functions. +""" + +from . import PYQT5, PYQT4, PythonQtError + + +if PYQT5: + from PyQt5.QtDesigner import * +elif PYQT4: + from PyQt4.QtDesigner import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtGui.py b/python3.9libs/qtpy/QtGui.py new file mode 100644 index 0000000..071be13 --- /dev/null +++ b/python3.9libs/qtpy/QtGui.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2014-2015 Colin Duquesnoy +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +""" +Provides QtGui classes and functions. +.. 
warning:: Only PyQt4/PySide QtGui classes compatible with PyQt5.QtGui are + exposed here. Therefore, you need to treat/use this package as if it were + the ``PyQt5.QtGui`` module. +""" +import warnings + +from . import PYQT5, PYQT4, PYSIDE, PYSIDE2, PythonQtError + + +if PYQT5: + from PyQt5.QtGui import * +elif PYSIDE2: + from PySide2.QtGui import * +elif PYQT4: + try: + # Older versions of PyQt4 do not provide these + from PyQt4.QtGui import (QGlyphRun, QMatrix2x2, QMatrix2x3, + QMatrix2x4, QMatrix3x2, QMatrix3x3, + QMatrix3x4, QMatrix4x2, QMatrix4x3, + QMatrix4x4, QTouchEvent, QQuaternion, + QRadialGradient, QRawFont, QStaticText, + QVector2D, QVector3D, QVector4D, + qFuzzyCompare) + except ImportError: + pass + from PyQt4.Qt import QKeySequence, QTextCursor + from PyQt4.QtGui import (QAbstractTextDocumentLayout, QActionEvent, QBitmap, + QBrush, QClipboard, QCloseEvent, QColor, + QConicalGradient, QContextMenuEvent, QCursor, + QDoubleValidator, QDrag, + QDragEnterEvent, QDragLeaveEvent, QDragMoveEvent, + QDropEvent, QFileOpenEvent, QFocusEvent, QFont, + QFontDatabase, QFontInfo, QFontMetrics, + QFontMetricsF, QGradient, QHelpEvent, + QHideEvent, QHoverEvent, QIcon, QIconDragEvent, + QIconEngine, QImage, QImageIOHandler, QImageReader, + QImageWriter, QInputEvent, QInputMethodEvent, + QKeyEvent, QLinearGradient, + QMouseEvent, QMoveEvent, QMovie, + QPaintDevice, QPaintEngine, QPaintEngineState, + QPaintEvent, QPainter, QPainterPath, + QPainterPathStroker, QPalette, QPen, QPicture, + QPictureIO, QPixmap, QPixmapCache, QPolygon, + QPolygonF, QRegExpValidator, QRegion, QResizeEvent, + QSessionManager, QShortcutEvent, QShowEvent, + QStandardItem, QStandardItemModel, + QStatusTipEvent, QSyntaxHighlighter, QTabletEvent, + QTextBlock, QTextBlockFormat, QTextBlockGroup, + QTextBlockUserData, QTextCharFormat, + QTextDocument, QTextDocumentFragment, + QTextDocumentWriter, QTextFormat, QTextFragment, + QTextFrame, QTextFrameFormat, QTextImageFormat, + QTextInlineObject, 
QTextItem, QTextLayout, + QTextLength, QTextLine, QTextList, QTextListFormat, + QTextObject, QTextObjectInterface, QTextOption, + QTextTable, QTextTableCell, QTextTableCellFormat, + QTextTableFormat, QTransform, + QValidator, QWhatsThisClickedEvent, QWheelEvent, + QWindowStateChangeEvent, qAlpha, qBlue, + qGray, qGreen, qIsGray, qRed, qRgb, + qRgba, QIntValidator) + + # QDesktopServices has has been split into (QDesktopServices and + # QStandardPaths) in Qt5 + # It only exposes QDesktopServices that are still in pyqt5 + from PyQt4.QtGui import QDesktopServices as _QDesktopServices + + class QDesktopServices(): + openUrl = _QDesktopServices.openUrl + setUrlHandler = _QDesktopServices.setUrlHandler + unsetUrlHandler = _QDesktopServices.unsetUrlHandler + + def __getattr__(self, name): + attr = getattr(_QDesktopServices, name) + + new_name = name + if name == 'storageLocation': + new_name = 'writableLocation' + warnings.warn(("Warning QDesktopServices.{} is deprecated in Qt5" + "we recommend you use QDesktopServices.{} instead").format(name, new_name), + DeprecationWarning) + return attr + QDesktopServices = QDesktopServices() + +elif PYSIDE: + from PySide.QtGui import (QAbstractTextDocumentLayout, QActionEvent, QBitmap, + QBrush, QClipboard, QCloseEvent, QColor, + QConicalGradient, QContextMenuEvent, QCursor, + QDoubleValidator, QDrag, + QDragEnterEvent, QDragLeaveEvent, QDragMoveEvent, + QDropEvent, QFileOpenEvent, QFocusEvent, QFont, + QFontDatabase, QFontInfo, QFontMetrics, + QFontMetricsF, QGradient, QHelpEvent, + QHideEvent, QHoverEvent, QIcon, QIconDragEvent, + QIconEngine, QImage, QImageIOHandler, QImageReader, + QImageWriter, QInputEvent, QInputMethodEvent, + QKeyEvent, QKeySequence, QLinearGradient, + QMatrix2x2, QMatrix2x3, QMatrix2x4, QMatrix3x2, + QMatrix3x3, QMatrix3x4, QMatrix4x2, QMatrix4x3, + QMatrix4x4, QMouseEvent, QMoveEvent, QMovie, + QPaintDevice, QPaintEngine, QPaintEngineState, + QPaintEvent, QPainter, QPainterPath, + QPainterPathStroker, 
QPalette, QPen, QPicture, + QPictureIO, QPixmap, QPixmapCache, QPolygon, + QPolygonF, QQuaternion, QRadialGradient, + QRegExpValidator, QRegion, QResizeEvent, + QSessionManager, QShortcutEvent, QShowEvent, + QStandardItem, QStandardItemModel, + QStatusTipEvent, QSyntaxHighlighter, QTabletEvent, + QTextBlock, QTextBlockFormat, QTextBlockGroup, + QTextBlockUserData, QTextCharFormat, QTextCursor, + QTextDocument, QTextDocumentFragment, + QTextFormat, QTextFragment, + QTextFrame, QTextFrameFormat, QTextImageFormat, + QTextInlineObject, QTextItem, QTextLayout, + QTextLength, QTextLine, QTextList, QTextListFormat, + QTextObject, QTextObjectInterface, QTextOption, + QTextTable, QTextTableCell, QTextTableCellFormat, + QTextTableFormat, QTouchEvent, QTransform, + QValidator, QVector2D, QVector3D, QVector4D, + QWhatsThisClickedEvent, QWheelEvent, + QWindowStateChangeEvent, qAlpha, qBlue, + qGray, qGreen, qIsGray, qRed, qRgb, qRgba, + QIntValidator) + # QDesktopServices has has been split into (QDesktopServices and + # QStandardPaths) in Qt5 + # It only exposes QDesktopServices that are still in pyqt5 + from PySide.QtGui import QDesktopServices as _QDesktopServices + + class QDesktopServices(): + openUrl = _QDesktopServices.openUrl + setUrlHandler = _QDesktopServices.setUrlHandler + unsetUrlHandler = _QDesktopServices.unsetUrlHandler + + def __getattr__(self, name): + attr = getattr(_QDesktopServices, name) + + new_name = name + if name == 'storageLocation': + new_name = 'writableLocation' + warnings.warn(("Warning QDesktopServices.{} is deprecated in Qt5" + "we recommend you use QDesktopServices.{} instead").format(name, new_name), + DeprecationWarning) + return attr + QDesktopServices = QDesktopServices() +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtHelp.py b/python3.9libs/qtpy/QtHelp.py new file mode 100644 index 0000000..ca9d93d --- /dev/null +++ b/python3.9libs/qtpy/QtHelp.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# 
+# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +"""QtHelp Wrapper.""" + +import warnings + +from . import PYQT5 +from . import PYQT4 +from . import PYSIDE +from . import PYSIDE2 + +if PYQT5: + from PyQt5.QtHelp import * +elif PYSIDE2: + from PySide2.QtHelp import * +elif PYQT4: + from PyQt4.QtHelp import * +elif PYSIDE: + from PySide.QtHelp import * diff --git a/python3.9libs/qtpy/QtLocation.py b/python3.9libs/qtpy/QtLocation.py new file mode 100644 index 0000000..9dfe874 --- /dev/null +++ b/python3.9libs/qtpy/QtLocation.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtLocation classes and functions.""" + +# Local imports +from . import PYQT5, PYSIDE2, PythonQtError + +if PYQT5: + from PyQt5.QtLocation import * +elif PYSIDE2: + from PySide2.QtLocation import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtMultimedia.py b/python3.9libs/qtpy/QtMultimedia.py new file mode 100644 index 0000000..9015ece --- /dev/null +++ b/python3.9libs/qtpy/QtMultimedia.py @@ -0,0 +1,17 @@ +import warnings + +from . import PYQT5 +from . import PYQT4 +from . import PYSIDE +from . 
import PYSIDE2 + +if PYQT5: + from PyQt5.QtMultimedia import * +elif PYSIDE2: + from PySide2.QtMultimedia import * +elif PYQT4: + from PyQt4.QtMultimedia import * + from PyQt4.QtGui import QSound +elif PYSIDE: + from PySide.QtMultimedia import * + from PySide.QtGui import QSound diff --git a/python3.9libs/qtpy/QtMultimediaWidgets.py b/python3.9libs/qtpy/QtMultimediaWidgets.py new file mode 100644 index 0000000..697845d --- /dev/null +++ b/python3.9libs/qtpy/QtMultimediaWidgets.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtMultimediaWidgets classes and functions.""" + +# Local imports +from . import PYSIDE2, PYQT5, PythonQtError + +if PYQT5: + from PyQt5.QtMultimediaWidgets import * +elif PYSIDE2: + from PySide2.QtMultimediaWidgets import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtNetwork.py b/python3.9libs/qtpy/QtNetwork.py new file mode 100644 index 0000000..49faded --- /dev/null +++ b/python3.9libs/qtpy/QtNetwork.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2014-2015 Colin Duquesnoy +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +""" +Provides QtNetwork classes and functions. +""" + +from . 
import PYQT5, PYSIDE2, PYQT4, PYSIDE, PythonQtError + + +if PYQT5: + from PyQt5.QtNetwork import * +elif PYSIDE2: + from PySide2.QtNetwork import * +elif PYQT4: + from PyQt4.QtNetwork import * +elif PYSIDE: + from PySide.QtNetwork import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtOpenGL.py b/python3.9libs/qtpy/QtOpenGL.py new file mode 100644 index 0000000..69ef822 --- /dev/null +++ b/python3.9libs/qtpy/QtOpenGL.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtOpenGL classes and functions.""" + +# Local imports +from . import PYQT4, PYQT5, PYSIDE, PYSIDE2, PythonQtError + +if PYQT5: + from PyQt5.QtOpenGL import * +elif PYSIDE2: + from PySide2.QtOpenGL import * +elif PYQT4: + from PyQt4.QtOpenGL import * +elif PYSIDE: + from PySide.QtOpenGL import * +else: + raise PythonQtError('No Qt bindings could be found') + +del PYQT4, PYQT5, PYSIDE, PYSIDE2 diff --git a/python3.9libs/qtpy/QtPrintSupport.py b/python3.9libs/qtpy/QtPrintSupport.py new file mode 100644 index 0000000..b821d41 --- /dev/null +++ b/python3.9libs/qtpy/QtPrintSupport.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +""" +Provides QtPrintSupport classes and functions. +""" + +from . 
import PYQT5, PYQT4,PYSIDE2, PYSIDE, PythonQtError + + +if PYQT5: + from PyQt5.QtPrintSupport import * +elif PYSIDE2: + from PySide2.QtPrintSupport import * +elif PYQT4: + from PyQt4.QtGui import (QAbstractPrintDialog, QPageSetupDialog, + QPrintDialog, QPrintEngine, QPrintPreviewDialog, + QPrintPreviewWidget, QPrinter, QPrinterInfo) +elif PYSIDE: + from PySide.QtGui import (QAbstractPrintDialog, QPageSetupDialog, + QPrintDialog, QPrintEngine, QPrintPreviewDialog, + QPrintPreviewWidget, QPrinter, QPrinterInfo) +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtQml.py b/python3.9libs/qtpy/QtQml.py new file mode 100644 index 0000000..117f977 --- /dev/null +++ b/python3.9libs/qtpy/QtQml.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtQml classes and functions.""" + +# Local imports +from . import PYQT5, PYSIDE2, PythonQtError + +if PYQT5: + from PyQt5.QtQml import * +elif PYSIDE2: + from PySide2.QtQml import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtQuick.py b/python3.9libs/qtpy/QtQuick.py new file mode 100644 index 0000000..8291066 --- /dev/null +++ b/python3.9libs/qtpy/QtQuick.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtQuick classes and functions.""" + +# Local imports +from . 
import PYQT5, PYSIDE2, PythonQtError + +if PYQT5: + from PyQt5.QtQuick import * +elif PYSIDE2: + from PySide2.QtQuick import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtQuickWidgets.py b/python3.9libs/qtpy/QtQuickWidgets.py new file mode 100644 index 0000000..545d52b --- /dev/null +++ b/python3.9libs/qtpy/QtQuickWidgets.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtQuickWidgets classes and functions.""" + +# Local imports +from . import PYQT5, PYSIDE2, PythonQtError + +if PYQT5: + from PyQt5.QtQuickWidgets import * +elif PYSIDE2: + from PySide2.QtQuickWidgets import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtSql.py b/python3.9libs/qtpy/QtSql.py new file mode 100644 index 0000000..98520be --- /dev/null +++ b/python3.9libs/qtpy/QtSql.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtSql classes and functions.""" + +# Local imports +from . 
import PYQT5, PYSIDE2, PYQT4, PYSIDE, PythonQtError + +if PYQT5: + from PyQt5.QtSql import * +elif PYSIDE2: + from PySide2.QtSql import * +elif PYQT4: + from PyQt4.QtSql import * +elif PYSIDE: + from PySide.QtSql import * +else: + raise PythonQtError('No Qt bindings could be found') + +del PYQT4, PYQT5, PYSIDE, PYSIDE2 diff --git a/python3.9libs/qtpy/QtSvg.py b/python3.9libs/qtpy/QtSvg.py new file mode 100644 index 0000000..edc075e --- /dev/null +++ b/python3.9libs/qtpy/QtSvg.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtSvg classes and functions.""" + +# Local imports +from . import PYQT4, PYSIDE2, PYQT5, PYSIDE, PythonQtError + +if PYQT5: + from PyQt5.QtSvg import * +elif PYSIDE2: + from PySide2.QtSvg import * +elif PYQT4: + from PyQt4.QtSvg import * +elif PYSIDE: + from PySide.QtSvg import * +else: + raise PythonQtError('No Qt bindings could be found') + +del PYQT4, PYQT5, PYSIDE, PYSIDE2 diff --git a/python3.9libs/qtpy/QtTest.py b/python3.9libs/qtpy/QtTest.py new file mode 100644 index 0000000..cca5e19 --- /dev/null +++ b/python3.9libs/qtpy/QtTest.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2014-2015 Colin Duquesnoy +# Copyright © 2009- The Spyder Developmet Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +""" +Provides QtTest and functions +""" + +from . 
import PYQT5,PYSIDE2, PYQT4, PYSIDE, PythonQtError + + +if PYQT5: + from PyQt5.QtTest import QTest +elif PYSIDE2: + from PySide2.QtTest import QTest +elif PYQT4: + from PyQt4.QtTest import QTest as OldQTest + + class QTest(OldQTest): + @staticmethod + def qWaitForWindowActive(QWidget): + OldQTest.qWaitForWindowShown(QWidget) +elif PYSIDE: + from PySide.QtTest import QTest +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtWebChannel.py b/python3.9libs/qtpy/QtWebChannel.py new file mode 100644 index 0000000..2862a05 --- /dev/null +++ b/python3.9libs/qtpy/QtWebChannel.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtWebChannel classes and functions.""" + +# Local imports +from . import PYSIDE2, PYQT5, PythonQtError + +if PYQT5: + from PyQt5.QtWebChannel import * +elif PYSIDE2: + from PySide2.QtWebChannel import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtWebEngineWidgets.py b/python3.9libs/qtpy/QtWebEngineWidgets.py new file mode 100644 index 0000000..d1df5bf --- /dev/null +++ b/python3.9libs/qtpy/QtWebEngineWidgets.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2014-2015 Colin Duquesnoy +# Copyright © 2009- The Spyder development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +""" +Provides QtWebEngineWidgets classes and functions. +""" + +from . 
import PYQT5,PYSIDE2, PYQT4, PYSIDE, PythonQtError + + +# To test if we are using WebEngine or WebKit +WEBENGINE = True + + +if PYQT5: + try: + from PyQt5.QtWebEngineWidgets import QWebEnginePage + from PyQt5.QtWebEngineWidgets import QWebEngineView + from PyQt5.QtWebEngineWidgets import QWebEngineSettings + except ImportError: + from PyQt5.QtWebKitWidgets import QWebPage as QWebEnginePage + from PyQt5.QtWebKitWidgets import QWebView as QWebEngineView + from PyQt5.QtWebKit import QWebSettings as QWebEngineSettings + WEBENGINE = False +elif PYSIDE2: + from PySide2.QtWebEngineWidgets import QWebEnginePage + from PySide2.QtWebEngineWidgets import QWebEngineView + from PySide2.QtWebEngineWidgets import QWebEngineSettings +elif PYQT4: + from PyQt4.QtWebKit import QWebPage as QWebEnginePage + from PyQt4.QtWebKit import QWebView as QWebEngineView + from PyQt4.QtWebKit import QWebSettings as QWebEngineSettings + WEBENGINE = False +elif PYSIDE: + from PySide.QtWebKit import QWebPage as QWebEnginePage + from PySide.QtWebKit import QWebView as QWebEngineView + from PySide.QtWebKit import QWebSettings as QWebEngineSettings + WEBENGINE = False +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtWebSockets.py b/python3.9libs/qtpy/QtWebSockets.py new file mode 100644 index 0000000..4b6a820 --- /dev/null +++ b/python3.9libs/qtpy/QtWebSockets.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtWebSockets classes and functions.""" + +# Local imports +from . 
import PYSIDE2, PYQT5, PythonQtError + +if PYQT5: + from PyQt5.QtWebSockets import * +elif PYSIDE2: + from PySide2.QtWebSockets import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtWidgets.py b/python3.9libs/qtpy/QtWidgets.py new file mode 100644 index 0000000..739f9ce --- /dev/null +++ b/python3.9libs/qtpy/QtWidgets.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2014-2015 Colin Duquesnoy +# Copyright © 2009- The Spyder Developmet Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +""" +Provides widget classes and functions. +.. warning:: Only PyQt4/PySide QtGui classes compatible with PyQt5.QtWidgets + are exposed here. Therefore, you need to treat/use this package as if it + were the ``PyQt5.QtWidgets`` module. +""" + +from . import PYQT5, PYSIDE2, PYQT4, PYSIDE, PythonQtError +from ._patch.qcombobox import patch_qcombobox +from ._patch.qheaderview import introduce_renamed_methods_qheaderview + + +if PYQT5: + from PyQt5.QtWidgets import * +elif PYSIDE2: + from PySide2.QtWidgets import * +elif PYQT4: + from PyQt4.QtGui import * + QStyleOptionViewItem = QStyleOptionViewItemV4 + del QStyleOptionViewItemV4 + + # These objects belong to QtGui + try: + # Older versions of PyQt4 do not provide these + del (QGlyphRun, + QMatrix2x2, QMatrix2x3, QMatrix2x4, QMatrix3x2, QMatrix3x3, + QMatrix3x4, QMatrix4x2, QMatrix4x3, QMatrix4x4, + QQuaternion, QRadialGradient, QRawFont, QRegExpValidator, + QStaticText, QTouchEvent, QVector2D, QVector3D, QVector4D, + qFuzzyCompare) + except NameError: + pass + del (QAbstractTextDocumentLayout, QActionEvent, QBitmap, QBrush, QClipboard, + QCloseEvent, QColor, QConicalGradient, QContextMenuEvent, QCursor, + QDesktopServices, QDoubleValidator, QDrag, QDragEnterEvent, + QDragLeaveEvent, QDragMoveEvent, QDropEvent, QFileOpenEvent, + QFocusEvent, QFont, QFontDatabase, QFontInfo, QFontMetrics, + QFontMetricsF, QGradient, QHelpEvent, 
QHideEvent, + QHoverEvent, QIcon, QIconDragEvent, QIconEngine, QImage, + QImageIOHandler, QImageReader, QImageWriter, QInputEvent, + QInputMethodEvent, QKeyEvent, QKeySequence, QLinearGradient, + QMouseEvent, QMoveEvent, QMovie, QPaintDevice, QPaintEngine, + QPaintEngineState, QPaintEvent, QPainter, QPainterPath, + QPainterPathStroker, QPalette, QPen, QPicture, QPictureIO, QPixmap, + QPixmapCache, QPolygon, QPolygonF, + QRegion, QResizeEvent, QSessionManager, QShortcutEvent, QShowEvent, + QStandardItem, QStandardItemModel, QStatusTipEvent, + QSyntaxHighlighter, QTabletEvent, QTextBlock, QTextBlockFormat, + QTextBlockGroup, QTextBlockUserData, QTextCharFormat, QTextCursor, + QTextDocument, QTextDocumentFragment, QTextDocumentWriter, + QTextFormat, QTextFragment, QTextFrame, QTextFrameFormat, + QTextImageFormat, QTextInlineObject, QTextItem, QTextLayout, + QTextLength, QTextLine, QTextList, QTextListFormat, QTextObject, + QTextObjectInterface, QTextOption, QTextTable, QTextTableCell, + QTextTableCellFormat, QTextTableFormat, QTransform, + QValidator, QWhatsThisClickedEvent, + QWheelEvent, QWindowStateChangeEvent, qAlpha, qBlue, + qGray, qGreen, qIsGray, qRed, qRgb, qRgba, QIntValidator, + QStringListModel) + + # These objects belong to QtPrintSupport + del (QAbstractPrintDialog, QPageSetupDialog, QPrintDialog, QPrintEngine, + QPrintPreviewDialog, QPrintPreviewWidget, QPrinter, QPrinterInfo) + + # These objects belong to QtCore + del (QItemSelection, QItemSelectionModel, QItemSelectionRange, + QSortFilterProxyModel) + + # Patch QComboBox to allow Python objects to be passed to userData + patch_qcombobox(QComboBox) + + # QHeaderView: renamed methods + introduce_renamed_methods_qheaderview(QHeaderView) + +elif PYSIDE: + from PySide.QtGui import * + QStyleOptionViewItem = QStyleOptionViewItemV4 + del QStyleOptionViewItemV4 + + # These objects belong to QtGui + del (QAbstractTextDocumentLayout, QActionEvent, QBitmap, QBrush, QClipboard, + QCloseEvent, QColor, 
QConicalGradient, QContextMenuEvent, QCursor, + QDesktopServices, QDoubleValidator, QDrag, QDragEnterEvent, + QDragLeaveEvent, QDragMoveEvent, QDropEvent, QFileOpenEvent, + QFocusEvent, QFont, QFontDatabase, QFontInfo, QFontMetrics, + QFontMetricsF, QGradient, QHelpEvent, QHideEvent, + QHoverEvent, QIcon, QIconDragEvent, QIconEngine, QImage, + QImageIOHandler, QImageReader, QImageWriter, QInputEvent, + QInputMethodEvent, QKeyEvent, QKeySequence, QLinearGradient, + QMatrix2x2, QMatrix2x3, QMatrix2x4, QMatrix3x2, QMatrix3x3, + QMatrix3x4, QMatrix4x2, QMatrix4x3, QMatrix4x4, QMouseEvent, + QMoveEvent, QMovie, QPaintDevice, QPaintEngine, QPaintEngineState, + QPaintEvent, QPainter, QPainterPath, QPainterPathStroker, QPalette, + QPen, QPicture, QPictureIO, QPixmap, QPixmapCache, QPolygon, + QPolygonF, QQuaternion, QRadialGradient, QRegExpValidator, + QRegion, QResizeEvent, QSessionManager, QShortcutEvent, QShowEvent, + QStandardItem, QStandardItemModel, QStatusTipEvent, + QSyntaxHighlighter, QTabletEvent, QTextBlock, QTextBlockFormat, + QTextBlockGroup, QTextBlockUserData, QTextCharFormat, QTextCursor, + QTextDocument, QTextDocumentFragment, + QTextFormat, QTextFragment, QTextFrame, QTextFrameFormat, + QTextImageFormat, QTextInlineObject, QTextItem, QTextLayout, + QTextLength, QTextLine, QTextList, QTextListFormat, QTextObject, + QTextObjectInterface, QTextOption, QTextTable, QTextTableCell, + QTextTableCellFormat, QTextTableFormat, QTouchEvent, QTransform, + QValidator, QVector2D, QVector3D, QVector4D, QWhatsThisClickedEvent, + QWheelEvent, QWindowStateChangeEvent, qAlpha, qBlue, qGray, qGreen, + qIsGray, qRed, qRgb, qRgba, QIntValidator, QStringListModel) + + # These objects belong to QtPrintSupport + del (QAbstractPrintDialog, QPageSetupDialog, QPrintDialog, QPrintEngine, + QPrintPreviewDialog, QPrintPreviewWidget, QPrinter, QPrinterInfo) + + # These objects belong to QtCore + del (QItemSelection, QItemSelectionModel, QItemSelectionRange, + QSortFilterProxyModel) + + 
# Patch QComboBox to allow Python objects to be passed to userData + patch_qcombobox(QComboBox) + + # QHeaderView: renamed methods + introduce_renamed_methods_qheaderview(QHeaderView) + +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/QtXmlPatterns.py b/python3.9libs/qtpy/QtXmlPatterns.py new file mode 100644 index 0000000..b41e13d --- /dev/null +++ b/python3.9libs/qtpy/QtXmlPatterns.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright © 2009- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +# ----------------------------------------------------------------------------- +"""Provides QtXmlPatterns classes and functions.""" + +# Local imports +from . import PYQT4, PYSIDE2, PYQT5, PYSIDE, PythonQtError + +if PYQT5: + from PyQt5.QtXmlPatterns import * +elif PYSIDE2: + from PySide2.QtXmlPatterns import * +elif PYQT4: + from PyQt4.QtXmlPatterns import * +elif PYSIDE: + from PySide.QtXmlPatterns import * +else: + raise PythonQtError('No Qt bindings could be found') diff --git a/python3.9libs/qtpy/__init__.py b/python3.9libs/qtpy/__init__.py new file mode 100644 index 0000000..ddbe452 --- /dev/null +++ b/python3.9libs/qtpy/__init__.py @@ -0,0 +1,226 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2009- The Spyder Development Team +# Copyright © 2014-2015 Colin Duquesnoy +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) + +""" +**QtPy** is a shim over the various Python Qt bindings. It is used to write +Qt binding indenpendent libraries or applications. + +If one of the APIs has already been imported, then it will be used. + +Otherwise, the shim will automatically select the first available API (PyQt5, +PySide2, PyQt4 and finally PySide); in that case, you can force the use of one +specific bindings (e.g. 
if your application is using one specific bindings and +you need to use library that use QtPy) by setting up the ``QT_API`` environment +variable. + +PyQt5 +===== + +For PyQt5, you don't have to set anything as it will be used automatically:: + + >>> from qtpy import QtGui, QtWidgets, QtCore + >>> print(QtWidgets.QWidget) + + +PySide2 +====== + +Set the QT_API environment variable to 'pyside2' before importing other +packages:: + + >>> import os + >>> os.environ['QT_API'] = 'pyside2' + >>> from qtpy import QtGui, QtWidgets, QtCore + >>> print(QtWidgets.QWidget) + +PyQt4 +===== + +Set the ``QT_API`` environment variable to 'pyqt' before importing any python +package:: + + >>> import os + >>> os.environ['QT_API'] = 'pyqt' + >>> from qtpy import QtGui, QtWidgets, QtCore + >>> print(QtWidgets.QWidget) + +PySide +====== + +Set the QT_API environment variable to 'pyside' before importing other +packages:: + + >>> import os + >>> os.environ['QT_API'] = 'pyside' + >>> from qtpy import QtGui, QtWidgets, QtCore + >>> print(QtWidgets.QWidget) + +""" + +from distutils.version import LooseVersion +import os +import platform +import sys +import warnings + +# Version of QtPy +from ._version import __version__ + + +class PythonQtError(RuntimeError): + """Error raise if no bindings could be selected.""" + pass + + +class PythonQtWarning(Warning): + """Warning if some features are not implemented in a binding.""" + pass + + +# Qt API environment variable name +QT_API = 'QT_API' + +# Names of the expected PyQt5 api +PYQT5_API = ['pyqt5'] + +# Names of the expected PyQt4 api +PYQT4_API = [ + 'pyqt', # name used in IPython.qt + 'pyqt4' # pyqode.qt original name +] + +# Names of the expected PySide api +PYSIDE_API = ['pyside'] + +# Names of the expected PySide2 api +PYSIDE2_API = ['pyside2'] + +# Detecting if a binding was specified by the user +binding_specified = QT_API in os.environ + +# Setting a default value for QT_API +os.environ.setdefault(QT_API, 'pyqt5') + +API = 
os.environ[QT_API].lower() +initial_api = API +assert API in (PYQT5_API + PYQT4_API + PYSIDE_API + PYSIDE2_API) + +is_old_pyqt = is_pyqt46 = False +PYQT5 = True +PYQT4 = PYSIDE = PYSIDE2 = False + +# When `FORCE_QT_API` is set, we disregard +# any previously imported python bindings. +if os.environ.get('FORCE_QT_API') is not None: + if 'PyQt5' in sys.modules: + API = initial_api if initial_api in PYQT5_API else 'pyqt5' + elif 'PySide2' in sys.modules: + API = initial_api if initial_api in PYSIDE2_API else 'pyside2' + elif 'PyQt4' in sys.modules: + API = initial_api if initial_api in PYQT4_API else 'pyqt4' + elif 'PySide' in sys.modules: + API = initial_api if initial_api in PYSIDE_API else 'pyside' + + +if API in PYQT5_API: + try: + from PyQt5.QtCore import PYQT_VERSION_STR as PYQT_VERSION # analysis:ignore + from PyQt5.QtCore import QT_VERSION_STR as QT_VERSION # analysis:ignore + PYSIDE_VERSION = None + + if sys.platform == 'darwin': + macos_version = LooseVersion(platform.mac_ver()[0]) + if macos_version < LooseVersion('10.10'): + if LooseVersion(QT_VERSION) >= LooseVersion('5.9'): + raise PythonQtError("Qt 5.9 or higher only works in " + "macOS 10.10 or higher. Your " + "program will fail in this " + "system.") + elif macos_version < LooseVersion('10.11'): + if LooseVersion(QT_VERSION) >= LooseVersion('5.11'): + raise PythonQtError("Qt 5.11 or higher only works in " + "macOS 10.11 or higher. 
Your " + "program will fail in this " + "system.") + + del macos_version + except ImportError: + API = os.environ['QT_API'] = 'pyside2' + +if API in PYSIDE2_API: + try: + from PySide2 import __version__ as PYSIDE_VERSION # analysis:ignore + from PySide2.QtCore import __version__ as QT_VERSION # analysis:ignore + + PYQT_VERSION = None + PYQT5 = False + PYSIDE2 = True + + if sys.platform == 'darwin': + macos_version = LooseVersion(platform.mac_ver()[0]) + if macos_version < LooseVersion('10.11'): + if LooseVersion(QT_VERSION) >= LooseVersion('5.11'): + raise PythonQtError("Qt 5.11 or higher only works in " + "macOS 10.11 or higher. Your " + "program will fail in this " + "system.") + + del macos_version + except ImportError: + API = os.environ['QT_API'] = 'pyqt' + +if API in PYQT4_API: + try: + import sip + try: + sip.setapi('QString', 2) + sip.setapi('QVariant', 2) + sip.setapi('QDate', 2) + sip.setapi('QDateTime', 2) + sip.setapi('QTextStream', 2) + sip.setapi('QTime', 2) + sip.setapi('QUrl', 2) + except (AttributeError, ValueError): + # PyQt < v4.6 + pass + from PyQt4.Qt import PYQT_VERSION_STR as PYQT_VERSION # analysis:ignore + from PyQt4.Qt import QT_VERSION_STR as QT_VERSION # analysis:ignore + PYSIDE_VERSION = None + PYQT5 = False + PYQT4 = True + except ImportError: + API = os.environ['QT_API'] = 'pyside' + else: + is_old_pyqt = PYQT_VERSION.startswith(('4.4', '4.5', '4.6', '4.7')) + is_pyqt46 = PYQT_VERSION.startswith('4.6') + +if API in PYSIDE_API: + try: + from PySide import __version__ as PYSIDE_VERSION # analysis:ignore + from PySide.QtCore import __version__ as QT_VERSION # analysis:ignore + PYQT_VERSION = None + PYQT5 = PYSIDE2 = False + PYSIDE = True + except ImportError: + raise PythonQtError('No Qt bindings could be found') + +# If a correct API name is passed to QT_API and it could not be found, +# switches to another and informs through the warning +if API != initial_api and binding_specified: + warnings.warn('Selected binding "{}" could not be 
found, ' + 'using "{}"'.format(initial_api, API), RuntimeWarning) + +API_NAME = {'pyqt5': 'PyQt5', 'pyqt': 'PyQt4', 'pyqt4': 'PyQt4', + 'pyside': 'PySide', 'pyside2':'PySide2'}[API] + +if PYQT4: + import sip + try: + API_NAME += (" (API v{0})".format(sip.getapi('QString'))) + except AttributeError: + pass diff --git a/python3.9libs/qtpy/_patch/__init__.py b/python3.9libs/qtpy/_patch/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python3.9libs/qtpy/_patch/qcombobox.py b/python3.9libs/qtpy/_patch/qcombobox.py new file mode 100644 index 0000000..d3e98be --- /dev/null +++ b/python3.9libs/qtpy/_patch/qcombobox.py @@ -0,0 +1,101 @@ +# The code below, as well as the associated test were adapted from +# qt-helpers, which was released under a 3-Clause BSD license: +# +# Copyright (c) 2015, Chris Beaumont and Thomas Robitaille +# +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the +# distribution. +# * Neither the name of the Glue project nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +# PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +def patch_qcombobox(QComboBox): + """ + In PySide, using Python objects as userData in QComboBox causes + Segmentation faults under certain conditions. Even in cases where it + doesn't, findData does not work correctly. Likewise, findData also does not + work correctly with Python objects when using PyQt4. On the other hand, + PyQt5 deals with this case correctly. We therefore patch QComboBox when + using PyQt4 and PySide to avoid issues. + """ + + from ..QtGui import QIcon + from ..QtCore import Qt, QObject + + class userDataWrapper(): + """ + This class is used to wrap any userData object. If we don't do this, + then certain types of objects can cause segmentation faults or issues + depending on whether/how __getitem__ is defined. 
+ """ + def __init__(self, data): + self.data = data + + _addItem = QComboBox.addItem + + def addItem(self, *args, **kwargs): + if len(args) == 3 or (not isinstance(args[0], QIcon) + and len(args) == 2): + args, kwargs['userData'] = args[:-1], args[-1] + if 'userData' in kwargs: + kwargs['userData'] = userDataWrapper(kwargs['userData']) + _addItem(self, *args, **kwargs) + + _insertItem = QComboBox.insertItem + + def insertItem(self, *args, **kwargs): + if len(args) == 4 or (not isinstance(args[1], QIcon) + and len(args) == 3): + args, kwargs['userData'] = args[:-1], args[-1] + if 'userData' in kwargs: + kwargs['userData'] = userDataWrapper(kwargs['userData']) + _insertItem(self, *args, **kwargs) + + _setItemData = QComboBox.setItemData + + def setItemData(self, index, value, role=Qt.UserRole): + value = userDataWrapper(value) + _setItemData(self, index, value, role=role) + + _itemData = QComboBox.itemData + + def itemData(self, index, role=Qt.UserRole): + userData = _itemData(self, index, role=role) + if isinstance(userData, userDataWrapper): + userData = userData.data + return userData + + def findData(self, value): + for i in range(self.count()): + if self.itemData(i) == value: + return i + return -1 + + QComboBox.addItem = addItem + QComboBox.insertItem = insertItem + QComboBox.setItemData = setItemData + QComboBox.itemData = itemData + QComboBox.findData = findData \ No newline at end of file diff --git a/python3.9libs/qtpy/_patch/qheaderview.py b/python3.9libs/qtpy/_patch/qheaderview.py new file mode 100644 index 0000000..b6baddb --- /dev/null +++ b/python3.9libs/qtpy/_patch/qheaderview.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# +# Copyright © The Spyder Development Team +# +# Licensed under the terms of the MIT License +# (see LICENSE.txt for details) +import warnings + +def introduce_renamed_methods_qheaderview(QHeaderView): + + _isClickable = QHeaderView.isClickable + def sectionsClickable(self): + """ + QHeaderView.sectionsClickable() -> bool + """ + 
return _isClickable(self) + QHeaderView.sectionsClickable = sectionsClickable + def isClickable(self): + warnings.warn('isClickable is only available in Qt4. Use ' + 'sectionsClickable instead.', stacklevel=2) + return _isClickable(self) + QHeaderView.isClickable = isClickable + + + _isMovable = QHeaderView.isMovable + def sectionsMovable(self): + """ + QHeaderView.sectionsMovable() -> bool + """ + return _isMovable(self) + QHeaderView.sectionsMovable = sectionsMovable + def isMovable(self): + warnings.warn('isMovable is only available in Qt4. Use ' + 'sectionsMovable instead.', stacklevel=2) + return _isMovable(self) + QHeaderView.isMovable = isMovable + + + _resizeMode = QHeaderView.resizeMode + def sectionResizeMode(self, logicalIndex): + """ + QHeaderView.sectionResizeMode(int) -> QHeaderView.ResizeMode + """ + return _resizeMode(self, logicalIndex) + QHeaderView.sectionResizeMode = sectionResizeMode + def resizeMode(self, logicalIndex): + warnings.warn('resizeMode is only available in Qt4. Use ' + 'sectionResizeMode instead.', stacklevel=2) + return _resizeMode(self, logicalIndex) + QHeaderView.resizeMode = resizeMode + + _setClickable = QHeaderView.setClickable + def setSectionsClickable(self, clickable): + """ + QHeaderView.setSectionsClickable(bool) + """ + return _setClickable(self, clickable) + QHeaderView.setSectionsClickable = setSectionsClickable + def setClickable(self, clickable): + warnings.warn('setClickable is only available in Qt4. Use ' + 'setSectionsClickable instead.', stacklevel=2) + return _setClickable(self, clickable) + QHeaderView.setClickable = setClickable + + + _setMovable = QHeaderView.setMovable + def setSectionsMovable(self, movable): + """ + QHeaderView.setSectionsMovable(bool) + """ + return _setMovable(self, movable) + QHeaderView.setSectionsMovable = setSectionsMovable + def setMovable(self, movable): + warnings.warn('setMovable is only available in Qt4. 
Use ' + 'setSectionsMovable instead.', stacklevel=2) + return _setMovable(self, movable) + QHeaderView.setMovable = setMovable + + + _setResizeMode = QHeaderView.setResizeMode + def setSectionResizeMode(self, *args): + """ + QHeaderView.setSectionResizeMode(QHeaderView.ResizeMode) + QHeaderView.setSectionResizeMode(int, QHeaderView.ResizeMode) + """ + _setResizeMode(self, *args) + QHeaderView.setSectionResizeMode = setSectionResizeMode + def setResizeMode(self, *args): + warnings.warn('setResizeMode is only available in Qt4. Use ' + 'setSectionResizeMode instead.', stacklevel=2) + _setResizeMode(self, *args) + QHeaderView.setResizeMode = setResizeMode + + + + diff --git a/python3.9libs/qtpy/_version.py b/python3.9libs/qtpy/_version.py new file mode 100644 index 0000000..a8ef90d --- /dev/null +++ b/python3.9libs/qtpy/_version.py @@ -0,0 +1,2 @@ +version_info = (1, 9, 0) +__version__ = '.'.join(map(str, version_info)) diff --git a/python3.9libs/qtpy/compat.py b/python3.9libs/qtpy/compat.py new file mode 100644 index 0000000..f579454 --- /dev/null +++ b/python3.9libs/qtpy/compat.py @@ -0,0 +1,196 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2009- The Spyder Development Team +# Licensed under the terms of the MIT License + +""" +Compatibility functions +""" + +from __future__ import print_function +import sys +import collections + +from . 
import PYQT4 +from .QtWidgets import QFileDialog +from .py3compat import is_text_string, to_text_string, TEXT_TYPES + + +# ============================================================================= +# QVariant conversion utilities +# ============================================================================= +PYQT_API_1 = False +if PYQT4: + import sip + try: + PYQT_API_1 = sip.getapi('QVariant') == 1 # PyQt API #1 + except AttributeError: + # PyQt =v4.4 (API #1 and #2) and PySide >=v1.0""" + # Calling QFileDialog static method + if sys.platform == "win32": + # On Windows platforms: redirect standard outputs + _temp1, _temp2 = sys.stdout, sys.stderr + sys.stdout, sys.stderr = None, None + try: + result = QFileDialog.getExistingDirectory(parent, caption, basedir, + options) + finally: + if sys.platform == "win32": + # On Windows platforms: restore standard outputs + sys.stdout, sys.stderr = _temp1, _temp2 + if not is_text_string(result): + # PyQt API #1 + result = to_text_string(result) + return result + + +def _qfiledialog_wrapper(attr, parent=None, caption='', basedir='', + filters='', selectedfilter='', options=None): + if options is None: + options = QFileDialog.Options(0) + try: + # PyQt =v4.6 + QString = None # analysis:ignore + tuple_returned = True + try: + # PyQt >=v4.6 + func = getattr(QFileDialog, attr+'AndFilter') + except AttributeError: + # PySide or PyQt =v4.6 + output, selectedfilter = result + else: + # PyQt =v4.4 (API #1 and #2) and PySide >=v1.0""" + return _qfiledialog_wrapper('getOpenFileName', parent=parent, + caption=caption, basedir=basedir, + filters=filters, selectedfilter=selectedfilter, + options=options) + + +def getopenfilenames(parent=None, caption='', basedir='', filters='', + selectedfilter='', options=None): + """Wrapper around QtGui.QFileDialog.getOpenFileNames static method + Returns a tuple (filenames, selectedfilter) -- when dialog box is canceled, + returns a tuple (empty list, empty string) + Compatible with PyQt >=v4.4 
(API #1 and #2) and PySide >=v1.0""" + return _qfiledialog_wrapper('getOpenFileNames', parent=parent, + caption=caption, basedir=basedir, + filters=filters, selectedfilter=selectedfilter, + options=options) + + +def getsavefilename(parent=None, caption='', basedir='', filters='', + selectedfilter='', options=None): + """Wrapper around QtGui.QFileDialog.getSaveFileName static method + Returns a tuple (filename, selectedfilter) -- when dialog box is canceled, + returns a tuple of empty strings + Compatible with PyQt >=v4.4 (API #1 and #2) and PySide >=v1.0""" + return _qfiledialog_wrapper('getSaveFileName', parent=parent, + caption=caption, basedir=basedir, + filters=filters, selectedfilter=selectedfilter, + options=options) diff --git a/python3.9libs/qtpy/py3compat.py b/python3.9libs/qtpy/py3compat.py new file mode 100644 index 0000000..43550b6 --- /dev/null +++ b/python3.9libs/qtpy/py3compat.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# +# Copyright © 2012-2013 Pierre Raybaut +# Licensed under the terms of the MIT License +# (see spyderlib/__init__.py for details) + +""" +spyderlib.py3compat +------------------- + +Transitional module providing compatibility functions intended to help +migrating from Python 2 to Python 3. 
+ +This module should be fully compatible with: + * Python >=v2.6 + * Python 3 +""" + +from __future__ import print_function + +import sys +import os + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY33 = PY3 and sys.version_info[1] >= 3 + + +# ============================================================================= +# Data types +# ============================================================================= +if PY2: + # Python 2 + TEXT_TYPES = (str, unicode) + INT_TYPES = (int, long) +else: + # Python 3 + TEXT_TYPES = (str,) + INT_TYPES = (int,) +NUMERIC_TYPES = tuple(list(INT_TYPES) + [float, complex]) + + +# ============================================================================= +# Renamed/Reorganized modules +# ============================================================================= +if PY2: + # Python 2 + import __builtin__ as builtins + import ConfigParser as configparser + try: + import _winreg as winreg + except ImportError: + pass + from sys import maxint as maxsize + try: + import CStringIO as io + except ImportError: + import StringIO as io + try: + import cPickle as pickle + except ImportError: + import pickle + from UserDict import DictMixin as MutableMapping + import thread as _thread + import repr as reprlib +else: + # Python 3 + import builtins + import configparser + try: + import winreg + except ImportError: + pass + from sys import maxsize + import io + import pickle + if PY33: + from collections.abc import MutableMapping + else: + from collections import MutableMapping + import _thread + import reprlib + + +# ============================================================================= +# Strings +# ============================================================================= +if PY2: + # Python 2 + import codecs + + def u(obj): + """Make unicode object""" + return codecs.unicode_escape_decode(obj)[0] +else: + # Python 3 + def u(obj): + """Return string as it is""" + return obj + + +def is_text_string(obj): + 
"""Return True if `obj` is a text string, False if it is anything else, + like binary data (Python 3) or QString (Python 2, PyQt API #1)""" + if PY2: + # Python 2 + return isinstance(obj, basestring) + else: + # Python 3 + return isinstance(obj, str) + + +def is_binary_string(obj): + """Return True if `obj` is a binary string, False if it is anything else""" + if PY2: + # Python 2 + return isinstance(obj, str) + else: + # Python 3 + return isinstance(obj, bytes) + + +def is_string(obj): + """Return True if `obj` is a text or binary Python string object, + False if it is anything else, like a QString (Python 2, PyQt API #1)""" + return is_text_string(obj) or is_binary_string(obj) + + +def is_unicode(obj): + """Return True if `obj` is unicode""" + if PY2: + # Python 2 + return isinstance(obj, unicode) + else: + # Python 3 + return isinstance(obj, str) + + +def to_text_string(obj, encoding=None): + """Convert `obj` to (unicode) text string""" + if PY2: + # Python 2 + if encoding is None: + return unicode(obj) + else: + return unicode(obj, encoding) + else: + # Python 3 + if encoding is None: + return str(obj) + elif isinstance(obj, str): + # In case this function is not used properly, this could happen + return obj + else: + return str(obj, encoding) + + +def to_binary_string(obj, encoding=None): + """Convert `obj` to binary string (bytes in Python 3, str in Python 2)""" + if PY2: + # Python 2 + if encoding is None: + return str(obj) + else: + return obj.encode(encoding) + else: + # Python 3 + return bytes(obj, 'utf-8' if encoding is None else encoding) + + +# ============================================================================= +# Function attributes +# ============================================================================= +def get_func_code(func): + """Return function code object""" + if PY2: + # Python 2 + return func.func_code + else: + # Python 3 + return func.__code__ + + +def get_func_name(func): + """Return function name""" + if PY2: + # Python 
2 + return func.func_name + else: + # Python 3 + return func.__name__ + + +def get_func_defaults(func): + """Return function default argument values""" + if PY2: + # Python 2 + return func.func_defaults + else: + # Python 3 + return func.__defaults__ + + +# ============================================================================= +# Special method attributes +# ============================================================================= +def get_meth_func(obj): + """Return method function object""" + if PY2: + # Python 2 + return obj.im_func + else: + # Python 3 + return obj.__func__ + + +def get_meth_class_inst(obj): + """Return method class instance""" + if PY2: + # Python 2 + return obj.im_self + else: + # Python 3 + return obj.__self__ + + +def get_meth_class(obj): + """Return method class""" + if PY2: + # Python 2 + return obj.im_class + else: + # Python 3 + return obj.__self__.__class__ + + +# ============================================================================= +# Misc. 
+# ============================================================================= +if PY2: + # Python 2 + input = raw_input + getcwd = os.getcwdu + cmp = cmp + import string + str_lower = string.lower + from itertools import izip_longest as zip_longest +else: + # Python 3 + input = input + getcwd = os.getcwd + + def cmp(a, b): + return (a > b) - (a < b) + str_lower = str.lower + from itertools import zip_longest + + +def qbytearray_to_str(qba): + """Convert QByteArray object to str in a way compatible with Python 2/3""" + return str(bytes(qba.toHex().data()).decode()) diff --git a/python3.9libs/qtpy/tests/__init__.py b/python3.9libs/qtpy/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python3.9libs/qtpy/tests/conftest.py b/python3.9libs/qtpy/tests/conftest.py new file mode 100644 index 0000000..c631886 --- /dev/null +++ b/python3.9libs/qtpy/tests/conftest.py @@ -0,0 +1,71 @@ +import os + + +def pytest_configure(config): + """ + This function gets run by py.test at the very start + """ + + if 'USE_QT_API' in os.environ: + os.environ['QT_API'] = os.environ['USE_QT_API'].lower() + + # We need to import qtpy here to make sure that the API versions get set + # straight away. + import qtpy + + +def pytest_report_header(config): + """ + This function is used by py.test to insert a customized header into the + test report. 
+ """ + + versions = os.linesep + versions += 'PyQt4: ' + + try: + from PyQt4 import Qt + versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR) + except ImportError: + versions += 'not installed' + except AttributeError: + versions += 'unknown version' + + versions += os.linesep + versions += 'PyQt5: ' + + try: + from PyQt5 import Qt + versions += "PyQt: {0} - Qt: {1}".format(Qt.PYQT_VERSION_STR, Qt.QT_VERSION_STR) + except ImportError: + versions += 'not installed' + except AttributeError: + versions += 'unknown version' + + versions += os.linesep + versions += 'PySide: ' + + try: + import PySide + from PySide import QtCore + versions += "PySide: {0} - Qt: {1}".format(PySide.__version__, QtCore.__version__) + except ImportError: + versions += 'not installed' + except AttributeError: + versions += 'unknown version' + + versions += os.linesep + versions += 'PySide2: ' + + try: + import PySide2 + from PySide2 import QtCore + versions += "PySide: {0} - Qt: {1}".format(PySide2.__version__, QtCore.__version__) + except ImportError: + versions += 'not installed' + except AttributeError: + versions += 'unknown version' + + versions += os.linesep + + return versions diff --git a/python3.9libs/qtpy/tests/runtests.py b/python3.9libs/qtpy/tests/runtests.py new file mode 100644 index 0000000..b54fbb4 --- /dev/null +++ b/python3.9libs/qtpy/tests/runtests.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# ---------------------------------------------------------------------------- +# Copyright © 2015- The Spyder Development Team +# +# Licensed under the terms of the MIT License +# ---------------------------------------------------------------------------- + +"""File for running tests programmatically.""" + +# Standard library imports +import sys + +# Third party imports +import qtpy # to ensure that Qt4 uses API v2 +import pytest + + +def main(): + """Run pytest tests.""" + errno = pytest.main(['-x', 'qtpy', '-v', '-rw', 
'--durations=10', + '--cov=qtpy', '--cov-report=term-missing']) + sys.exit(errno) + +if __name__ == '__main__': + main() diff --git a/python3.9libs/qtpy/tests/test_macos_checks.py b/python3.9libs/qtpy/tests/test_macos_checks.py new file mode 100644 index 0000000..01aa809 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_macos_checks.py @@ -0,0 +1,110 @@ +from __future__ import absolute_import + +import mock +import platform +import sys + +import pytest +from qtpy import PYQT5, PYSIDE2 + + +@pytest.mark.skipif(not PYQT5, reason="Targeted to PyQt5") +@mock.patch.object(platform, 'mac_ver') +def test_qt59_exception(mac_ver, monkeypatch): + # Remove qtpy to reimport it again + try: + del sys.modules["qtpy"] + except KeyError: + pass + + # Patch stdlib to emulate a macOS system + monkeypatch.setattr("sys.platform", 'darwin') + mac_ver.return_value = ('10.9.2',) + + # Patch Qt version + monkeypatch.setattr("PyQt5.QtCore.QT_VERSION_STR", '5.9.1') + + # This should raise an Exception + with pytest.raises(Exception) as e: + import qtpy + + assert '10.10' in str(e.value) + assert '5.9' in str(e.value) + + +@pytest.mark.skipif(not PYQT5, reason="Targeted to PyQt5") +@mock.patch.object(platform, 'mac_ver') +def test_qt59_no_exception(mac_ver, monkeypatch): + # Remove qtpy to reimport it again + try: + del sys.modules["qtpy"] + except KeyError: + pass + + # Patch stdlib to emulate a macOS system + monkeypatch.setattr("sys.platform", 'darwin') + mac_ver.return_value = ('10.10.1',) + + # Patch Qt version + monkeypatch.setattr("PyQt5.QtCore.QT_VERSION_STR", '5.9.5') + + # This should not raise an Exception + try: + import qtpy + except Exception: + pytest.fail("Error!") + + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), + reason="Targeted to PyQt5 or PySide2") +@mock.patch.object(platform, 'mac_ver') +def test_qt511_exception(mac_ver, monkeypatch): + # Remove qtpy to reimport it again + try: + del sys.modules["qtpy"] + except KeyError: + pass + + # Patch stdlib to emulate a macOS 
system + monkeypatch.setattr("sys.platform", 'darwin') + mac_ver.return_value = ('10.10.3',) + + # Patch Qt version + if PYQT5: + monkeypatch.setattr("PyQt5.QtCore.QT_VERSION_STR", '5.11.1') + else: + monkeypatch.setattr("PySide2.QtCore.__version__", '5.11.1') + + # This should raise an Exception + with pytest.raises(Exception) as e: + import qtpy + + assert '10.11' in str(e.value) + assert '5.11' in str(e.value) + + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), + reason="Targeted to PyQt5 or PySide2") +@mock.patch.object(platform, 'mac_ver') +def test_qt511_no_exception(mac_ver, monkeypatch): + # Remove qtpy to reimport it again + try: + del sys.modules["qtpy"] + except KeyError: + pass + + # Patch stdlib to emulate a macOS system + monkeypatch.setattr("sys.platform", 'darwin') + mac_ver.return_value = ('10.13.2',) + + # Patch Qt version + if PYQT5: + monkeypatch.setattr("PyQt5.QtCore.QT_VERSION_STR", '5.11.1') + else: + monkeypatch.setattr("PySide2.QtCore.__version__", '5.11.1') + + # This should not raise an Exception + try: + import qtpy + except Exception: + pytest.fail("Error!") diff --git a/python3.9libs/qtpy/tests/test_main.py b/python3.9libs/qtpy/tests/test_main.py new file mode 100644 index 0000000..2449249 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_main.py @@ -0,0 +1,82 @@ +import os + +from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets + + +def assert_pyside(): + """ + Make sure that we are using PySide + """ + import PySide + assert QtCore.QEvent is PySide.QtCore.QEvent + assert QtGui.QPainter is PySide.QtGui.QPainter + assert QtWidgets.QWidget is PySide.QtGui.QWidget + assert QtWebEngineWidgets.QWebEnginePage is PySide.QtWebKit.QWebPage + +def assert_pyside2(): + """ + Make sure that we are using PySide + """ + import PySide2 + assert QtCore.QEvent is PySide2.QtCore.QEvent + assert QtGui.QPainter is PySide2.QtGui.QPainter + assert QtWidgets.QWidget is PySide2.QtWidgets.QWidget + assert QtWebEngineWidgets.QWebEnginePage is 
PySide2.QtWebEngineWidgets.QWebEnginePage + +def assert_pyqt4(): + """ + Make sure that we are using PyQt4 + """ + import PyQt4 + assert QtCore.QEvent is PyQt4.QtCore.QEvent + assert QtGui.QPainter is PyQt4.QtGui.QPainter + assert QtWidgets.QWidget is PyQt4.QtGui.QWidget + assert QtWebEngineWidgets.QWebEnginePage is PyQt4.QtWebKit.QWebPage + + +def assert_pyqt5(): + """ + Make sure that we are using PyQt5 + """ + import PyQt5 + assert QtCore.QEvent is PyQt5.QtCore.QEvent + assert QtGui.QPainter is PyQt5.QtGui.QPainter + assert QtWidgets.QWidget is PyQt5.QtWidgets.QWidget + if QtWebEngineWidgets.WEBENGINE: + assert QtWebEngineWidgets.QWebEnginePage is PyQt5.QtWebEngineWidgets.QWebEnginePage + else: + assert QtWebEngineWidgets.QWebEnginePage is PyQt5.QtWebKitWidgets.QWebPage + + +def test_qt_api(): + """ + If QT_API is specified, we check that the correct Qt wrapper was used + """ + + QT_API = os.environ.get('QT_API', '').lower() + + if QT_API == 'pyside': + assert_pyside() + elif QT_API in ('pyqt', 'pyqt4'): + assert_pyqt4() + elif QT_API == 'pyqt5': + assert_pyqt5() + elif QT_API == 'pyside2': + assert_pyside2() + else: + # If the tests are run locally, USE_QT_API and QT_API may not be + # defined, but we still want to make sure qtpy is behaving sensibly. + # We should then be loading, in order of decreasing preference, PyQt5, + # PyQt4, and PySide. 
+ try: + import PyQt5 + except ImportError: + try: + import PyQt4 + except ImportError: + import PySide + assert_pyside() + else: + assert_pyqt4() + else: + assert_pyqt5() diff --git a/python3.9libs/qtpy/tests/test_patch_qcombobox.py b/python3.9libs/qtpy/tests/test_patch_qcombobox.py new file mode 100644 index 0000000..fcafe56 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_patch_qcombobox.py @@ -0,0 +1,106 @@ +from __future__ import absolute_import + +import os +import sys + +import pytest +from qtpy import PYSIDE2, QtGui, QtWidgets + + +PY3 = sys.version[0] == "3" + + +def get_qapp(icon_path=None): + qapp = QtWidgets.QApplication.instance() + if qapp is None: + qapp = QtWidgets.QApplication(['']) + return qapp + + +class Data(object): + """ + Test class to store in userData. The __getitem__ is needed in order to + reproduce the segmentation fault. + """ + def __getitem__(self, item): + raise ValueError("Failing") + + +@pytest.mark.skipif(PY3 or (PYSIDE2 and os.environ.get('CI', None) is not None), + reason="It segfaults in Python 3 and in our CIs with PySide2") +def test_patched_qcombobox(): + """ + In PySide, using Python objects as userData in QComboBox causes + Segmentation faults under certain conditions. Even in cases where it + doesn't, findData does not work correctly. Likewise, findData also + does not work correctly with Python objects when using PyQt4. On the + other hand, PyQt5 deals with this case correctly. We therefore patch + QComboBox when using PyQt4 and PySide to avoid issues. 
+ """ + + app = get_qapp() + + data1 = Data() + data2 = Data() + data3 = Data() + data4 = Data() + data5 = Data() + data6 = Data() + + icon1 = QtGui.QIcon() + icon2 = QtGui.QIcon() + + widget = QtWidgets.QComboBox() + widget.addItem('a', data1) + widget.insertItem(0, 'b', data2) + widget.addItem('c', data1) + widget.setItemData(2, data3) + widget.addItem(icon1, 'd', data4) + widget.insertItem(3, icon2, 'e', data5) + widget.addItem(icon1, 'f') + widget.insertItem(5, icon2, 'g') + + widget.show() + + assert widget.findData(data1) == 1 + assert widget.findData(data2) == 0 + assert widget.findData(data3) == 2 + assert widget.findData(data4) == 4 + assert widget.findData(data5) == 3 + assert widget.findData(data6) == -1 + + assert widget.itemData(0) == data2 + assert widget.itemData(1) == data1 + assert widget.itemData(2) == data3 + assert widget.itemData(3) == data5 + assert widget.itemData(4) == data4 + assert widget.itemData(5) is None + assert widget.itemData(6) is None + + assert widget.itemText(0) == 'b' + assert widget.itemText(1) == 'a' + assert widget.itemText(2) == 'c' + assert widget.itemText(3) == 'e' + assert widget.itemText(4) == 'd' + assert widget.itemText(5) == 'g' + assert widget.itemText(6) == 'f' + + +@pytest.mark.skipif((PYSIDE2 and os.environ.get('CI', None) is not None), + reason="It segfaults in our CIs with PYSIDE2") +def test_model_item(): + """ + This is a regression test for an issue that caused the call to item(0) + below to trigger segmentation faults in PySide. The issue is + non-deterministic when running the call once, so we include a loop to make + sure that we trigger the fault. 
+ """ + app = get_qapp() + combo = QtWidgets.QComboBox() + label_data = [('a', None)] + for iter in range(10000): + combo.clear() + for i, (label, data) in enumerate(label_data): + combo.addItem(label, userData=data) + model = combo.model() + model.item(0) diff --git a/python3.9libs/qtpy/tests/test_patch_qheaderview.py b/python3.9libs/qtpy/tests/test_patch_qheaderview.py new file mode 100644 index 0000000..17037f3 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_patch_qheaderview.py @@ -0,0 +1,98 @@ +from __future__ import absolute_import + +import sys + +import pytest +from qtpy import PYSIDE, PYSIDE2, PYQT4 +from qtpy.QtWidgets import QApplication +from qtpy.QtWidgets import QHeaderView +from qtpy.QtCore import Qt +from qtpy.QtCore import QAbstractListModel + + +PY3 = sys.version[0] == "3" + + +def get_qapp(icon_path=None): + qapp = QApplication.instance() + if qapp is None: + qapp = QApplication(['']) + return qapp + + +@pytest.mark.skipif(PY3 or PYSIDE2, reason="It fails on Python 3 and PySide2") +def test_patched_qheaderview(): + """ + This will test whether QHeaderView has the new methods introduced in Qt5. + It will then create an instance of QHeaderView and test that no exceptions + are raised and that some basic behaviour works. 
+ """ + assert QHeaderView.sectionsClickable is not None + assert QHeaderView.sectionsMovable is not None + assert QHeaderView.sectionResizeMode is not None + assert QHeaderView.setSectionsClickable is not None + assert QHeaderView.setSectionsMovable is not None + assert QHeaderView.setSectionResizeMode is not None + + # setup a model and add it to a headerview + qapp = get_qapp() + headerview = QHeaderView(Qt.Horizontal) + class Model(QAbstractListModel): + pass + model = Model() + headerview.setModel(model) + assert headerview.count() == 1 + + # test it + assert isinstance(headerview.sectionsClickable(), bool) + assert isinstance(headerview.sectionsMovable(), bool) + if PYSIDE: + assert isinstance(headerview.sectionResizeMode(0), + QHeaderView.ResizeMode) + else: + assert isinstance(headerview.sectionResizeMode(0), int) + + headerview.setSectionsClickable(True) + assert headerview.sectionsClickable() == True + headerview.setSectionsClickable(False) + assert headerview.sectionsClickable() == False + + headerview.setSectionsMovable(True) + assert headerview.sectionsMovable() == True + headerview.setSectionsMovable(False) + assert headerview.sectionsMovable() == False + + headerview.setSectionResizeMode(QHeaderView.Interactive) + assert headerview.sectionResizeMode(0) == QHeaderView.Interactive + headerview.setSectionResizeMode(QHeaderView.Fixed) + assert headerview.sectionResizeMode(0) == QHeaderView.Fixed + headerview.setSectionResizeMode(QHeaderView.Stretch) + assert headerview.sectionResizeMode(0) == QHeaderView.Stretch + headerview.setSectionResizeMode(QHeaderView.ResizeToContents) + assert headerview.sectionResizeMode(0) == QHeaderView.ResizeToContents + + headerview.setSectionResizeMode(0, QHeaderView.Interactive) + assert headerview.sectionResizeMode(0) == QHeaderView.Interactive + headerview.setSectionResizeMode(0, QHeaderView.Fixed) + assert headerview.sectionResizeMode(0) == QHeaderView.Fixed + headerview.setSectionResizeMode(0, QHeaderView.Stretch) + 
assert headerview.sectionResizeMode(0) == QHeaderView.Stretch + headerview.setSectionResizeMode(0, QHeaderView.ResizeToContents) + assert headerview.sectionResizeMode(0) == QHeaderView.ResizeToContents + + # test that the old methods in Qt4 raise exceptions + if PYQT4 or PYSIDE: + with pytest.warns(UserWarning): + headerview.isClickable() + with pytest.warns(UserWarning): + headerview.isMovable() + with pytest.warns(UserWarning): + headerview.resizeMode(0) + with pytest.warns(UserWarning): + headerview.setClickable(True) + with pytest.warns(UserWarning): + headerview.setMovable(True) + with pytest.warns(UserWarning): + headerview.setResizeMode(0, QHeaderView.Interactive) + + diff --git a/python3.9libs/qtpy/tests/test_qdesktopservice_split.py b/python3.9libs/qtpy/tests/test_qdesktopservice_split.py new file mode 100644 index 0000000..472f2df --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qdesktopservice_split.py @@ -0,0 +1,41 @@ +"""Test QDesktopServices split in Qt5.""" + +from __future__ import absolute_import + +import pytest +import warnings +from qtpy import PYQT4, PYSIDE + + +def test_qstandarpath(): + """Test the qtpy.QStandardPaths namespace""" + from qtpy.QtCore import QStandardPaths + + assert QStandardPaths.StandardLocation is not None + + # Attributes from QDesktopServices shouldn't be in QStandardPaths + with pytest.raises(AttributeError) as excinfo: + QStandardPaths.setUrlHandler + + +def test_qdesktopservice(): + """Test the qtpy.QDesktopServices namespace""" + from qtpy.QtGui import QDesktopServices + + assert QDesktopServices.setUrlHandler is not None + + +@pytest.mark.skipif(not (PYQT4 or PYSIDE), reason="Warning is only raised in old bindings") +def test_qdesktopservice_qt4_pyside(): + from qtpy.QtGui import QDesktopServices + # Attributes from QStandardPaths should raise a warning when imported + # from QDesktopServices + with warnings.catch_warnings(record=True) as w: + # Cause all warnings to always be triggered. 
+ warnings.simplefilter("always") + # Try to import QtHelp. + QDesktopServices.StandardLocation + + assert len(w) == 1 + assert issubclass(w[-1].category, DeprecationWarning) + assert "deprecated" in str(w[-1].message) diff --git a/python3.9libs/qtpy/tests/test_qt3danimation.py b/python3.9libs/qtpy/tests/test_qt3danimation.py new file mode 100644 index 0000000..650be19 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qt3danimation.py @@ -0,0 +1,25 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qt3danimation(): + """Test the qtpy.Qt3DAnimation namespace""" + Qt3DAnimation = pytest.importorskip("qtpy.Qt3DAnimation") + + assert Qt3DAnimation.QAnimationController is not None + assert Qt3DAnimation.QAdditiveClipBlend is not None + assert Qt3DAnimation.QAbstractClipBlendNode is not None + assert Qt3DAnimation.QAbstractAnimation is not None + assert Qt3DAnimation.QKeyframeAnimation is not None + assert Qt3DAnimation.QAbstractAnimationClip is not None + assert Qt3DAnimation.QAbstractClipAnimator is not None + assert Qt3DAnimation.QClipAnimator is not None + assert Qt3DAnimation.QAnimationGroup is not None + assert Qt3DAnimation.QLerpClipBlend is not None + assert Qt3DAnimation.QMorphingAnimation is not None + assert Qt3DAnimation.QAnimationAspect is not None + assert Qt3DAnimation.QVertexBlendAnimation is not None + assert Qt3DAnimation.QBlendedClipAnimator is not None + assert Qt3DAnimation.QMorphTarget is not None diff --git a/python3.9libs/qtpy/tests/test_qt3dcore.py b/python3.9libs/qtpy/tests/test_qt3dcore.py new file mode 100644 index 0000000..821fbd4 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qt3dcore.py @@ -0,0 +1,44 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qt3dcore(): 
+ """Test the qtpy.Qt3DCore namespace""" + Qt3DCore = pytest.importorskip("qtpy.Qt3DCore") + + assert Qt3DCore.QPropertyValueAddedChange is not None + assert Qt3DCore.QSkeletonLoader is not None + assert Qt3DCore.QPropertyNodeRemovedChange is not None + assert Qt3DCore.QPropertyUpdatedChange is not None + assert Qt3DCore.QAspectEngine is not None + assert Qt3DCore.QPropertyValueAddedChangeBase is not None + assert Qt3DCore.QStaticPropertyValueRemovedChangeBase is not None + assert Qt3DCore.QPropertyNodeAddedChange is not None + assert Qt3DCore.QDynamicPropertyUpdatedChange is not None + assert Qt3DCore.QStaticPropertyUpdatedChangeBase is not None + assert Qt3DCore.ChangeFlags is not None + assert Qt3DCore.QAbstractAspect is not None + assert Qt3DCore.QBackendNode is not None + assert Qt3DCore.QTransform is not None + assert Qt3DCore.QPropertyUpdatedChangeBase is not None + assert Qt3DCore.QNodeId is not None + assert Qt3DCore.QJoint is not None + assert Qt3DCore.QSceneChange is not None + assert Qt3DCore.QNodeIdTypePair is not None + assert Qt3DCore.QAbstractSkeleton is not None + assert Qt3DCore.QComponentRemovedChange is not None + assert Qt3DCore.QComponent is not None + assert Qt3DCore.QEntity is not None + assert Qt3DCore.QNodeCommand is not None + assert Qt3DCore.QNode is not None + assert Qt3DCore.QPropertyValueRemovedChange is not None + assert Qt3DCore.QPropertyValueRemovedChangeBase is not None + assert Qt3DCore.QComponentAddedChange is not None + assert Qt3DCore.QNodeCreatedChangeBase is not None + assert Qt3DCore.QNodeDestroyedChange is not None + assert Qt3DCore.QArmature is not None + assert Qt3DCore.QStaticPropertyValueAddedChangeBase is not None + assert Qt3DCore.ChangeFlag is not None + assert Qt3DCore.QSkeleton is not None diff --git a/python3.9libs/qtpy/tests/test_qt3dextras.py b/python3.9libs/qtpy/tests/test_qt3dextras.py new file mode 100644 index 0000000..f63c7d5 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qt3dextras.py @@ -0,0 +1,47 @@ 
+from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qt3dextras(): + """Test the qtpy.Qt3DExtras namespace""" + Qt3DExtras = pytest.importorskip("qtpy.Qt3DExtras") + + assert Qt3DExtras.QTextureMaterial is not None + assert Qt3DExtras.QPhongAlphaMaterial is not None + assert Qt3DExtras.QOrbitCameraController is not None + assert Qt3DExtras.QAbstractSpriteSheet is not None + assert Qt3DExtras.QNormalDiffuseMapMaterial is not None + assert Qt3DExtras.QDiffuseSpecularMaterial is not None + assert Qt3DExtras.QSphereGeometry is not None + assert Qt3DExtras.QCuboidGeometry is not None + assert Qt3DExtras.QForwardRenderer is not None + assert Qt3DExtras.QPhongMaterial is not None + assert Qt3DExtras.QSpriteGrid is not None + assert Qt3DExtras.QDiffuseMapMaterial is not None + assert Qt3DExtras.QConeGeometry is not None + assert Qt3DExtras.QSpriteSheetItem is not None + assert Qt3DExtras.QPlaneGeometry is not None + assert Qt3DExtras.QSphereMesh is not None + assert Qt3DExtras.QNormalDiffuseSpecularMapMaterial is not None + assert Qt3DExtras.QCuboidMesh is not None + assert Qt3DExtras.QGoochMaterial is not None + assert Qt3DExtras.QText2DEntity is not None + assert Qt3DExtras.QTorusMesh is not None + assert Qt3DExtras.Qt3DWindow is not None + assert Qt3DExtras.QPerVertexColorMaterial is not None + assert Qt3DExtras.QExtrudedTextGeometry is not None + assert Qt3DExtras.QSkyboxEntity is not None + assert Qt3DExtras.QAbstractCameraController is not None + assert Qt3DExtras.QExtrudedTextMesh is not None + assert Qt3DExtras.QCylinderGeometry is not None + assert Qt3DExtras.QTorusGeometry is not None + assert Qt3DExtras.QMorphPhongMaterial is not None + assert Qt3DExtras.QPlaneMesh is not None + assert Qt3DExtras.QDiffuseSpecularMapMaterial is not None + assert Qt3DExtras.QSpriteSheet is not None + assert Qt3DExtras.QConeMesh is not None + assert 
Qt3DExtras.QFirstPersonCameraController is not None + assert Qt3DExtras.QMetalRoughMaterial is not None + assert Qt3DExtras.QCylinderMesh is not None diff --git a/python3.9libs/qtpy/tests/test_qt3dinput.py b/python3.9libs/qtpy/tests/test_qt3dinput.py new file mode 100644 index 0000000..48d73d0 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qt3dinput.py @@ -0,0 +1,33 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qt3dinput(): + """Test the qtpy.Qt3DInput namespace""" + Qt3DInput = pytest.importorskip("qtpy.Qt3DInput") + + assert Qt3DInput.QAxisAccumulator is not None + assert Qt3DInput.QInputSettings is not None + assert Qt3DInput.QAnalogAxisInput is not None + assert Qt3DInput.QAbstractAxisInput is not None + assert Qt3DInput.QMouseHandler is not None + assert Qt3DInput.QButtonAxisInput is not None + assert Qt3DInput.QInputSequence is not None + assert Qt3DInput.QWheelEvent is not None + assert Qt3DInput.QActionInput is not None + assert Qt3DInput.QKeyboardDevice is not None + assert Qt3DInput.QMouseDevice is not None + assert Qt3DInput.QAxis is not None + assert Qt3DInput.QInputChord is not None + assert Qt3DInput.QMouseEvent is not None + assert Qt3DInput.QKeyboardHandler is not None + assert Qt3DInput.QKeyEvent is not None + assert Qt3DInput.QAbstractActionInput is not None + assert Qt3DInput.QInputAspect is not None + assert Qt3DInput.QLogicalDevice is not None + assert Qt3DInput.QAction is not None + assert Qt3DInput.QAbstractPhysicalDevice is not None + assert Qt3DInput.QAxisSetting is not None + diff --git a/python3.9libs/qtpy/tests/test_qt3dlogic.py b/python3.9libs/qtpy/tests/test_qt3dlogic.py new file mode 100644 index 0000000..34f7de6 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qt3dlogic.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + 
+@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qt3dlogic(): + """Test the qtpy.Qt3DLogic namespace""" + Qt3DLogic = pytest.importorskip("qtpy.Qt3DLogic") + + assert Qt3DLogic.QLogicAspect is not None + assert Qt3DLogic.QFrameAction is not None diff --git a/python3.9libs/qtpy/tests/test_qt3drender.py b/python3.9libs/qtpy/tests/test_qt3drender.py new file mode 100644 index 0000000..f464768 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qt3drender.py @@ -0,0 +1,119 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qt3drender(): + """Test the qtpy.Qt3DRender namespace""" + Qt3DRender = pytest.importorskip("qtpy.Qt3DRender") + + assert Qt3DRender.QPointSize is not None + assert Qt3DRender.QFrustumCulling is not None + assert Qt3DRender.QPickPointEvent is not None + assert Qt3DRender.QRenderPassFilter is not None + assert Qt3DRender.QMesh is not None + assert Qt3DRender.QRayCaster is not None + assert Qt3DRender.QStencilMask is not None + assert Qt3DRender.QPickLineEvent is not None + assert Qt3DRender.QPickTriangleEvent is not None + assert Qt3DRender.QRenderState is not None + assert Qt3DRender.QTextureWrapMode is not None + assert Qt3DRender.QRenderPass is not None + assert Qt3DRender.QGeometryRenderer is not None + assert Qt3DRender.QAttribute is not None + assert Qt3DRender.QStencilOperation is not None + assert Qt3DRender.QScissorTest is not None + assert Qt3DRender.QTextureCubeMapArray is not None + assert Qt3DRender.QRenderTarget is not None + assert Qt3DRender.QStencilTest is not None + assert Qt3DRender.QTextureData is not None + assert Qt3DRender.QBuffer is not None + assert Qt3DRender.QLineWidth is not None + assert Qt3DRender.QLayer is not None + assert Qt3DRender.QTextureRectangle is not None + assert Qt3DRender.QRenderTargetSelector is not None + assert 
Qt3DRender.QPickingSettings is not None + assert Qt3DRender.QCullFace is not None + assert Qt3DRender.QAbstractFunctor is not None + assert Qt3DRender.PropertyReaderInterface is not None + assert Qt3DRender.QMaterial is not None + assert Qt3DRender.QAlphaCoverage is not None + assert Qt3DRender.QClearBuffers is not None + assert Qt3DRender.QAlphaTest is not None + assert Qt3DRender.QStencilOperationArguments is not None + assert Qt3DRender.QTexture2DMultisample is not None + assert Qt3DRender.QLevelOfDetailSwitch is not None + assert Qt3DRender.QRenderStateSet is not None + assert Qt3DRender.QViewport is not None + assert Qt3DRender.QObjectPicker is not None + assert Qt3DRender.QPolygonOffset is not None + assert Qt3DRender.QRenderSettings is not None + assert Qt3DRender.QFrontFace is not None + assert Qt3DRender.QTexture3D is not None + assert Qt3DRender.QTextureBuffer is not None + assert Qt3DRender.QTechniqueFilter is not None + assert Qt3DRender.QLayerFilter is not None + assert Qt3DRender.QFilterKey is not None + assert Qt3DRender.QRenderSurfaceSelector is not None + assert Qt3DRender.QEnvironmentLight is not None + assert Qt3DRender.QMemoryBarrier is not None + assert Qt3DRender.QNoDepthMask is not None + assert Qt3DRender.QBlitFramebuffer is not None + assert Qt3DRender.QGraphicsApiFilter is not None + assert Qt3DRender.QAbstractTexture is not None + assert Qt3DRender.QRenderCaptureReply is not None + assert Qt3DRender.QAbstractLight is not None + assert Qt3DRender.QAbstractRayCaster is not None + assert Qt3DRender.QDirectionalLight is not None + assert Qt3DRender.QDispatchCompute is not None + assert Qt3DRender.QBufferDataGenerator is not None + assert Qt3DRender.QPointLight is not None + assert Qt3DRender.QStencilTestArguments is not None + assert Qt3DRender.QTexture1D is not None + assert Qt3DRender.QCameraSelector is not None + assert Qt3DRender.QProximityFilter is not None + assert Qt3DRender.QTexture1DArray is not None + assert 
Qt3DRender.QBlendEquation is not None + assert Qt3DRender.QTextureImageDataGenerator is not None + assert Qt3DRender.QSpotLight is not None + assert Qt3DRender.QEffect is not None + assert Qt3DRender.QSeamlessCubemap is not None + assert Qt3DRender.QTexture2DMultisampleArray is not None + assert Qt3DRender.QComputeCommand is not None + assert Qt3DRender.QFrameGraphNode is not None + assert Qt3DRender.QSortPolicy is not None + assert Qt3DRender.QTextureImageData is not None + assert Qt3DRender.QCamera is not None + assert Qt3DRender.QGeometry is not None + assert Qt3DRender.QScreenRayCaster is not None + assert Qt3DRender.QClipPlane is not None + assert Qt3DRender.QMultiSampleAntiAliasing is not None + assert Qt3DRender.QRayCasterHit is not None + assert Qt3DRender.QAbstractTextureImage is not None + assert Qt3DRender.QNoDraw is not None + assert Qt3DRender.QPickEvent is not None + assert Qt3DRender.QRenderCapture is not None + assert Qt3DRender.QDepthTest is not None + assert Qt3DRender.QParameter is not None + assert Qt3DRender.QLevelOfDetail is not None + assert Qt3DRender.QGeometryFactory is not None + assert Qt3DRender.QTexture2D is not None + assert Qt3DRender.QRenderAspect is not None + assert Qt3DRender.QPaintedTextureImage is not None + assert Qt3DRender.QDithering is not None + assert Qt3DRender.QTextureGenerator is not None + assert Qt3DRender.QBlendEquationArguments is not None + assert Qt3DRender.QLevelOfDetailBoundingSphere is not None + assert Qt3DRender.QColorMask is not None + assert Qt3DRender.QSceneLoader is not None + assert Qt3DRender.QTextureLoader is not None + assert Qt3DRender.QShaderProgram is not None + assert Qt3DRender.QTextureCubeMap is not None + assert Qt3DRender.QTexture2DArray is not None + assert Qt3DRender.QTextureImage is not None + assert Qt3DRender.QCameraLens is not None + assert Qt3DRender.QRenderTargetOutput is not None + assert Qt3DRender.QShaderProgramBuilder is not None + assert Qt3DRender.QTechnique is not None + assert 
Qt3DRender.QShaderData is not None diff --git a/python3.9libs/qtpy/tests/test_qtcharts.py b/python3.9libs/qtpy/tests/test_qtcharts.py new file mode 100644 index 0000000..4c72dbc --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtcharts.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYSIDE2 + + +@pytest.mark.skipif(not PYSIDE2, reason="Only available by default in PySide2") +def test_qtcharts(): + """Test the qtpy.QtCharts namespace""" + from qtpy import QtCharts + assert QtCharts.QtCharts.QChart is not None diff --git a/python3.9libs/qtpy/tests/test_qtcore.py b/python3.9libs/qtpy/tests/test_qtcore.py new file mode 100644 index 0000000..7a337bf --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtcore.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2, QtCore + +"""Test QtCore.""" + + +def test_qtmsghandler(): + """Test qtpy.QtMsgHandler""" + assert QtCore.qInstallMessageHandler is not None + + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), + reason="Targeted to PyQt5 or PySide2") +def test_DateTime_toPython(): + """Test QDateTime.toPython""" + assert QtCore.QDateTime.toPython is not None diff --git a/python3.9libs/qtpy/tests/test_qtdatavisualization.py b/python3.9libs/qtpy/tests/test_qtdatavisualization.py new file mode 100644 index 0000000..32142d6 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtdatavisualization.py @@ -0,0 +1,46 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qtdatavisualization(): + """Test the qtpy.QtDataVisualization namespace""" + QtDataVisualization = pytest.importorskip("qtpy.QtDataVisualization") + + assert QtDataVisualization.QScatter3DSeries is not None + assert QtDataVisualization.QSurfaceDataItem is not None + assert QtDataVisualization.QSurface3DSeries is not None + assert 
QtDataVisualization.QAbstract3DInputHandler is not None + assert QtDataVisualization.QHeightMapSurfaceDataProxy is not None + assert QtDataVisualization.QAbstractDataProxy is not None + assert QtDataVisualization.Q3DCamera is not None + assert QtDataVisualization.QAbstract3DGraph is not None + assert QtDataVisualization.QCustom3DVolume is not None + assert QtDataVisualization.Q3DInputHandler is not None + assert QtDataVisualization.QBarDataProxy is not None + assert QtDataVisualization.QSurfaceDataProxy is not None + assert QtDataVisualization.QScatterDataItem is not None + assert QtDataVisualization.Q3DLight is not None + assert QtDataVisualization.QScatterDataProxy is not None + assert QtDataVisualization.QValue3DAxis is not None + assert QtDataVisualization.Q3DBars is not None + assert QtDataVisualization.QBarDataItem is not None + assert QtDataVisualization.QItemModelBarDataProxy is not None + assert QtDataVisualization.Q3DTheme is not None + assert QtDataVisualization.QCustom3DItem is not None + assert QtDataVisualization.QItemModelScatterDataProxy is not None + assert QtDataVisualization.QValue3DAxisFormatter is not None + assert QtDataVisualization.QItemModelSurfaceDataProxy is not None + assert QtDataVisualization.Q3DScatter is not None + assert QtDataVisualization.QTouch3DInputHandler is not None + assert QtDataVisualization.QBar3DSeries is not None + assert QtDataVisualization.QAbstract3DAxis is not None + assert QtDataVisualization.Q3DScene is not None + assert QtDataVisualization.QCategory3DAxis is not None + assert QtDataVisualization.QAbstract3DSeries is not None + assert QtDataVisualization.Q3DObject is not None + assert QtDataVisualization.QCustom3DLabel is not None + assert QtDataVisualization.Q3DSurface is not None + assert QtDataVisualization.QLogValue3DAxisFormatter is not None + diff --git a/python3.9libs/qtpy/tests/test_qtdesigner.py b/python3.9libs/qtpy/tests/test_qtdesigner.py new file mode 100644 index 0000000..0327c6f --- /dev/null +++ 
b/python3.9libs/qtpy/tests/test_qtdesigner.py @@ -0,0 +1,28 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYSIDE2, PYSIDE + +@pytest.mark.skipif(PYSIDE2 or PYSIDE, reason="QtDesigner is not available in PySide/PySide2") +def test_qtdesigner(): + from qtpy import QtDesigner + """Test the qtpy.QtDesigner namespace""" + assert QtDesigner.QAbstractExtensionFactory is not None + assert QtDesigner.QAbstractExtensionManager is not None + assert QtDesigner.QDesignerActionEditorInterface is not None + assert QtDesigner.QDesignerContainerExtension is not None + assert QtDesigner.QDesignerCustomWidgetCollectionInterface is not None + assert QtDesigner.QDesignerCustomWidgetInterface is not None + assert QtDesigner.QDesignerFormEditorInterface is not None + assert QtDesigner.QDesignerFormWindowCursorInterface is not None + assert QtDesigner.QDesignerFormWindowInterface is not None + assert QtDesigner.QDesignerFormWindowManagerInterface is not None + assert QtDesigner.QDesignerMemberSheetExtension is not None + assert QtDesigner.QDesignerObjectInspectorInterface is not None + assert QtDesigner.QDesignerPropertyEditorInterface is not None + assert QtDesigner.QDesignerPropertySheetExtension is not None + assert QtDesigner.QDesignerTaskMenuExtension is not None + assert QtDesigner.QDesignerWidgetBoxInterface is not None + assert QtDesigner.QExtensionFactory is not None + assert QtDesigner.QExtensionManager is not None + assert QtDesigner.QFormBuilder is not None \ No newline at end of file diff --git a/python3.9libs/qtpy/tests/test_qthelp.py b/python3.9libs/qtpy/tests/test_qthelp.py new file mode 100644 index 0000000..2b70ca7 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qthelp.py @@ -0,0 +1,22 @@ +"""Test for QtHelp namespace.""" + +from __future__ import absolute_import + +import pytest + + +def test_qthelp(): + """Test the qtpy.QtHelp namespace.""" + from qtpy import QtHelp + + assert QtHelp.QHelpContentItem is not None + assert 
QtHelp.QHelpContentModel is not None + assert QtHelp.QHelpContentWidget is not None + assert QtHelp.QHelpEngine is not None + assert QtHelp.QHelpEngineCore is not None + assert QtHelp.QHelpIndexModel is not None + assert QtHelp.QHelpIndexWidget is not None + assert QtHelp.QHelpSearchEngine is not None + assert QtHelp.QHelpSearchQuery is not None + assert QtHelp.QHelpSearchQueryWidget is not None + assert QtHelp.QHelpSearchResultWidget is not None diff --git a/python3.9libs/qtpy/tests/test_qtlocation.py b/python3.9libs/qtpy/tests/test_qtlocation.py new file mode 100644 index 0000000..78bf933 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtlocation.py @@ -0,0 +1,48 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qtlocation(): + """Test the qtpy.QtLocation namespace""" + from qtpy import QtLocation + assert QtLocation.QGeoCodeReply is not None + assert QtLocation.QGeoCodingManager is not None + assert QtLocation.QGeoCodingManagerEngine is not None + assert QtLocation.QGeoManeuver is not None + assert QtLocation.QGeoRoute is not None + assert QtLocation.QGeoRouteReply is not None + assert QtLocation.QGeoRouteRequest is not None + assert QtLocation.QGeoRouteSegment is not None + assert QtLocation.QGeoRoutingManager is not None + assert QtLocation.QGeoRoutingManagerEngine is not None + assert QtLocation.QGeoServiceProvider is not None + #assert QtLocation.QGeoServiceProviderFactory is not None + assert QtLocation.QPlace is not None + assert QtLocation.QPlaceAttribute is not None + assert QtLocation.QPlaceCategory is not None + assert QtLocation.QPlaceContactDetail is not None + assert QtLocation.QPlaceContent is not None + assert QtLocation.QPlaceContentReply is not None + assert QtLocation.QPlaceContentRequest is not None + assert QtLocation.QPlaceDetailsReply is not None + assert QtLocation.QPlaceEditorial is not None + 
assert QtLocation.QPlaceIcon is not None + assert QtLocation.QPlaceIdReply is not None + assert QtLocation.QPlaceImage is not None + assert QtLocation.QPlaceManager is not None + assert QtLocation.QPlaceManagerEngine is not None + assert QtLocation.QPlaceMatchReply is not None + assert QtLocation.QPlaceMatchRequest is not None + assert QtLocation.QPlaceProposedSearchResult is not None + assert QtLocation.QPlaceRatings is not None + assert QtLocation.QPlaceReply is not None + assert QtLocation.QPlaceResult is not None + assert QtLocation.QPlaceReview is not None + assert QtLocation.QPlaceSearchReply is not None + assert QtLocation.QPlaceSearchRequest is not None + assert QtLocation.QPlaceSearchResult is not None + assert QtLocation.QPlaceSearchSuggestionReply is not None + assert QtLocation.QPlaceSupplier is not None + assert QtLocation.QPlaceUser is not None diff --git a/python3.9libs/qtpy/tests/test_qtmultimedia.py b/python3.9libs/qtpy/tests/test_qtmultimedia.py new file mode 100644 index 0000000..7fc5cf6 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtmultimedia.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import +import os +import sys + +import pytest + + +@pytest.mark.skipif(os.name == 'nt' and sys.version_info[:2] == (3, 5), + reason="Conda packages don't seem to include QtMultimedia") +def test_qtmultimedia(): + """Test the qtpy.QtMultimedia namespace""" + from qtpy import QtMultimedia + + assert QtMultimedia.QAbstractVideoBuffer is not None + assert QtMultimedia.QAudio is not None + assert QtMultimedia.QAudioDeviceInfo is not None + assert QtMultimedia.QAudioInput is not None + assert QtMultimedia.QSound is not None diff --git a/python3.9libs/qtpy/tests/test_qtmultimediawidgets.py b/python3.9libs/qtpy/tests/test_qtmultimediawidgets.py new file mode 100644 index 0000000..2bb52d5 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtmultimediawidgets.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import +import os +import sys + +import pytest 
+from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +@pytest.mark.skipif(os.name == 'nt' and sys.version_info[:2] == (3, 5), + reason="Conda packages don't seem to include QtMultimedia") +def test_qtmultimediawidgets(): + """Test the qtpy.QtMultimediaWidgets namespace""" + from qtpy import QtMultimediaWidgets + + assert QtMultimediaWidgets.QCameraViewfinder is not None + assert QtMultimediaWidgets.QGraphicsVideoItem is not None + assert QtMultimediaWidgets.QVideoWidget is not None + #assert QtMultimediaWidgets.QVideoWidgetControl is not None diff --git a/python3.9libs/qtpy/tests/test_qtnetwork.py b/python3.9libs/qtpy/tests/test_qtnetwork.py new file mode 100644 index 0000000..7f64591 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtnetwork.py @@ -0,0 +1,43 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYSIDE, PYSIDE2, QtNetwork + + +def test_qtnetwork(): + """Test the qtpy.QtNetwork namespace""" + assert QtNetwork.QAbstractNetworkCache is not None + assert QtNetwork.QNetworkCacheMetaData is not None + if not PYSIDE and not PYSIDE2: + assert QtNetwork.QHttpMultiPart is not None + assert QtNetwork.QHttpPart is not None + assert QtNetwork.QNetworkAccessManager is not None + assert QtNetwork.QNetworkCookie is not None + assert QtNetwork.QNetworkCookieJar is not None + assert QtNetwork.QNetworkDiskCache is not None + assert QtNetwork.QNetworkReply is not None + assert QtNetwork.QNetworkRequest is not None + assert QtNetwork.QNetworkConfigurationManager is not None + assert QtNetwork.QNetworkConfiguration is not None + assert QtNetwork.QNetworkSession is not None + assert QtNetwork.QAuthenticator is not None + assert QtNetwork.QHostAddress is not None + assert QtNetwork.QHostInfo is not None + assert QtNetwork.QNetworkAddressEntry is not None + assert QtNetwork.QNetworkInterface is not None + assert QtNetwork.QNetworkProxy is not None + assert 
QtNetwork.QNetworkProxyFactory is not None + assert QtNetwork.QNetworkProxyQuery is not None + assert QtNetwork.QAbstractSocket is not None + assert QtNetwork.QLocalServer is not None + assert QtNetwork.QLocalSocket is not None + assert QtNetwork.QTcpServer is not None + assert QtNetwork.QTcpSocket is not None + assert QtNetwork.QUdpSocket is not None + if not PYSIDE: + assert QtNetwork.QSslCertificate is not None + assert QtNetwork.QSslCipher is not None + assert QtNetwork.QSslConfiguration is not None + assert QtNetwork.QSslError is not None + assert QtNetwork.QSslKey is not None + assert QtNetwork.QSslSocket is not None diff --git a/python3.9libs/qtpy/tests/test_qtprintsupport.py b/python3.9libs/qtpy/tests/test_qtprintsupport.py new file mode 100644 index 0000000..2e8f786 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtprintsupport.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import + +import pytest +from qtpy import QtPrintSupport + + +def test_qtprintsupport(): + """Test the qtpy.QtPrintSupport namespace""" + assert QtPrintSupport.QAbstractPrintDialog is not None + assert QtPrintSupport.QPageSetupDialog is not None + assert QtPrintSupport.QPrintDialog is not None + assert QtPrintSupport.QPrintPreviewDialog is not None + assert QtPrintSupport.QPrintEngine is not None + assert QtPrintSupport.QPrinter is not None + assert QtPrintSupport.QPrinterInfo is not None + assert QtPrintSupport.QPrintPreviewWidget is not None + + diff --git a/python3.9libs/qtpy/tests/test_qtqml.py b/python3.9libs/qtpy/tests/test_qtqml.py new file mode 100644 index 0000000..a6d7ca9 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtqml.py @@ -0,0 +1,34 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qtqml(): + """Test the qtpy.QtQml namespace""" + from qtpy import QtQml + assert QtQml.QJSEngine is not None + assert QtQml.QJSValue is not 
None + assert QtQml.QJSValueIterator is not None + assert QtQml.QQmlAbstractUrlInterceptor is not None + assert QtQml.QQmlApplicationEngine is not None + assert QtQml.QQmlComponent is not None + assert QtQml.QQmlContext is not None + assert QtQml.QQmlEngine is not None + assert QtQml.QQmlImageProviderBase is not None + assert QtQml.QQmlError is not None + assert QtQml.QQmlExpression is not None + assert QtQml.QQmlExtensionPlugin is not None + assert QtQml.QQmlFileSelector is not None + assert QtQml.QQmlIncubationController is not None + assert QtQml.QQmlIncubator is not None + if not PYSIDE2: + # https://wiki.qt.io/Qt_for_Python_Missing_Bindings#QtQml + assert QtQml.QQmlListProperty is not None + assert QtQml.QQmlListReference is not None + assert QtQml.QQmlNetworkAccessManagerFactory is not None + assert QtQml.QQmlParserStatus is not None + assert QtQml.QQmlProperty is not None + assert QtQml.QQmlPropertyValueSource is not None + assert QtQml.QQmlScriptString is not None + assert QtQml.QQmlPropertyMap is not None diff --git a/python3.9libs/qtpy/tests/test_qtquick.py b/python3.9libs/qtpy/tests/test_qtquick.py new file mode 100644 index 0000000..257fd74 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtquick.py @@ -0,0 +1,53 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qtquick(): + """Test the qtpy.QtQuick namespace""" + from qtpy import QtQuick + assert QtQuick.QQuickAsyncImageProvider is not None + if not PYSIDE2: + assert QtQuick.QQuickCloseEvent is not None + assert QtQuick.QQuickFramebufferObject is not None + assert QtQuick.QQuickImageProvider is not None + assert QtQuick.QQuickImageResponse is not None + assert QtQuick.QQuickItem is not None + assert QtQuick.QQuickItemGrabResult is not None + assert QtQuick.QQuickPaintedItem is not None + assert QtQuick.QQuickRenderControl is not None + assert 
QtQuick.QQuickTextDocument is not None + assert QtQuick.QQuickTextureFactory is not None + assert QtQuick.QQuickView is not None + assert QtQuick.QQuickWindow is not None + assert QtQuick.QSGAbstractRenderer is not None + assert QtQuick.QSGBasicGeometryNode is not None + assert QtQuick.QSGClipNode is not None + assert QtQuick.QSGDynamicTexture is not None + assert QtQuick.QSGEngine is not None + if not PYSIDE2: + assert QtQuick.QSGFlatColorMaterial is not None + assert QtQuick.QSGGeometry is not None + assert QtQuick.QSGGeometryNode is not None + #assert QtQuick.QSGImageNode is not None + if not PYSIDE2: + assert QtQuick.QSGMaterial is not None + assert QtQuick.QSGMaterialShader is not None + assert QtQuick.QSGMaterialType is not None + assert QtQuick.QSGNode is not None + assert QtQuick.QSGOpacityNode is not None + if not PYSIDE2: + assert QtQuick.QSGOpaqueTextureMaterial is not None + #assert QtQuick.QSGRectangleNode is not None + #assert QtQuick.QSGRenderNode is not None + #assert QtQuick.QSGRendererInterface is not None + assert QtQuick.QSGSimpleRectNode is not None + assert QtQuick.QSGSimpleTextureNode is not None + assert QtQuick.QSGTexture is not None + if not PYSIDE2: + assert QtQuick.QSGTextureMaterial is not None + assert QtQuick.QSGTextureProvider is not None + assert QtQuick.QSGTransformNode is not None + if not PYSIDE2: + assert QtQuick.QSGVertexColorMaterial is not None diff --git a/python3.9libs/qtpy/tests/test_qtquickwidgets.py b/python3.9libs/qtpy/tests/test_qtquickwidgets.py new file mode 100644 index 0000000..0b41a8b --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtquickwidgets.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qtquickwidgets(): + """Test the qtpy.QtQuickWidgets namespace""" + from qtpy import QtQuickWidgets + assert QtQuickWidgets.QQuickWidget is not None diff --git 
a/python3.9libs/qtpy/tests/test_qtsql.py b/python3.9libs/qtpy/tests/test_qtsql.py new file mode 100644 index 0000000..1e7404f --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtsql.py @@ -0,0 +1,24 @@ +from __future__ import absolute_import + +import pytest +from qtpy import QtSql + +def test_qtsql(): + """Test the qtpy.QtSql namespace""" + assert QtSql.QSqlDatabase is not None + assert QtSql.QSqlDriverCreatorBase is not None + assert QtSql.QSqlDriver is not None + assert QtSql.QSqlError is not None + assert QtSql.QSqlField is not None + assert QtSql.QSqlIndex is not None + assert QtSql.QSqlQuery is not None + assert QtSql.QSqlRecord is not None + assert QtSql.QSqlResult is not None + assert QtSql.QSqlQueryModel is not None + assert QtSql.QSqlRelationalDelegate is not None + assert QtSql.QSqlRelation is not None + assert QtSql.QSqlRelationalTableModel is not None + assert QtSql.QSqlTableModel is not None + + # Following modules are not (yet) part of any wrapper: + # QSqlDriverCreator, QSqlDriverPlugin diff --git a/python3.9libs/qtpy/tests/test_qtsvg.py b/python3.9libs/qtpy/tests/test_qtsvg.py new file mode 100644 index 0000000..74d8522 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtsvg.py @@ -0,0 +1,13 @@ +from __future__ import absolute_import + +import pytest + + +def test_qtsvg(): + """Test the qtpy.QtSvg namespace""" + from qtpy import QtSvg + + assert QtSvg.QGraphicsSvgItem is not None + assert QtSvg.QSvgGenerator is not None + assert QtSvg.QSvgRenderer is not None + assert QtSvg.QSvgWidget is not None diff --git a/python3.9libs/qtpy/tests/test_qttest.py b/python3.9libs/qtpy/tests/test_qttest.py new file mode 100644 index 0000000..5d2ab9e --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qttest.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +import pytest +from qtpy import QtTest + + +def test_qttest(): + """Test the qtpy.QtTest namespace""" + assert QtTest.QTest is not None diff --git a/python3.9libs/qtpy/tests/test_qtwebchannel.py 
b/python3.9libs/qtpy/tests/test_qtwebchannel.py new file mode 100644 index 0000000..2beb70c --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtwebchannel.py @@ -0,0 +1,13 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qtwebchannel(): + """Test the qtpy.QtWebChannel namespace""" + from qtpy import QtWebChannel + + assert QtWebChannel.QWebChannel is not None + assert QtWebChannel.QWebChannelAbstractTransport is not None + diff --git a/python3.9libs/qtpy/tests/test_qtwebenginewidgets.py b/python3.9libs/qtpy/tests/test_qtwebenginewidgets.py new file mode 100644 index 0000000..77c8e1f --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtwebenginewidgets.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +import pytest +from qtpy import QtWebEngineWidgets + + +def test_qtwebenginewidgets(): + """Test the qtpy.QtWebEngineWidgets namespace""" + + assert QtWebEngineWidgets.QWebEnginePage is not None + assert QtWebEngineWidgets.QWebEngineView is not None + assert QtWebEngineWidgets.QWebEngineSettings is not None diff --git a/python3.9libs/qtpy/tests/test_qtwebsockets.py b/python3.9libs/qtpy/tests/test_qtwebsockets.py new file mode 100644 index 0000000..5bdcc32 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtwebsockets.py @@ -0,0 +1,15 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYQT5, PYSIDE2 + +@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings") +def test_qtwebsockets(): + """Test the qtpy.QtWebSockets namespace""" + from qtpy import QtWebSockets + + assert QtWebSockets.QMaskGenerator is not None + assert QtWebSockets.QWebSocket is not None + assert QtWebSockets.QWebSocketCorsAuthenticator is not None + assert QtWebSockets.QWebSocketProtocol is not None + assert QtWebSockets.QWebSocketServer is not None diff --git 
a/python3.9libs/qtpy/tests/test_qtxmlpatterns.py b/python3.9libs/qtpy/tests/test_qtxmlpatterns.py new file mode 100644 index 0000000..4c6d4cb --- /dev/null +++ b/python3.9libs/qtpy/tests/test_qtxmlpatterns.py @@ -0,0 +1,25 @@ +from __future__ import absolute_import + +import pytest +from qtpy import PYSIDE2, PYSIDE + +def test_qtxmlpatterns(): + """Test the qtpy.QtXmlPatterns namespace""" + from qtpy import QtXmlPatterns + assert QtXmlPatterns.QAbstractMessageHandler is not None + assert QtXmlPatterns.QAbstractUriResolver is not None + assert QtXmlPatterns.QAbstractXmlNodeModel is not None + assert QtXmlPatterns.QAbstractXmlReceiver is not None + if not PYSIDE2 and not PYSIDE: + assert QtXmlPatterns.QSimpleXmlNodeModel is not None + assert QtXmlPatterns.QSourceLocation is not None + assert QtXmlPatterns.QXmlFormatter is not None + assert QtXmlPatterns.QXmlItem is not None + assert QtXmlPatterns.QXmlName is not None + assert QtXmlPatterns.QXmlNamePool is not None + assert QtXmlPatterns.QXmlNodeModelIndex is not None + assert QtXmlPatterns.QXmlQuery is not None + assert QtXmlPatterns.QXmlResultItems is not None + assert QtXmlPatterns.QXmlSchema is not None + assert QtXmlPatterns.QXmlSchemaValidator is not None + assert QtXmlPatterns.QXmlSerializer is not None diff --git a/python3.9libs/qtpy/tests/test_uic.py b/python3.9libs/qtpy/tests/test_uic.py new file mode 100644 index 0000000..9a0fd28 --- /dev/null +++ b/python3.9libs/qtpy/tests/test_uic.py @@ -0,0 +1,86 @@ +import os +import sys +import contextlib + +import pytest +from qtpy import PYSIDE2, QtWidgets +from qtpy.QtWidgets import QComboBox +from qtpy import uic +from qtpy.uic import loadUi + + +QCOMBOBOX_SUBCLASS = """ +from qtpy.QtWidgets import QComboBox +class _QComboBoxSubclass(QComboBox): + pass +""" + +@contextlib.contextmanager +def enabled_qcombobox_subclass(tmpdir): + """ + Context manager that sets up a temporary module with a QComboBox subclass + and then removes it once we are done. 
+ """ + + with open(tmpdir.join('qcombobox_subclass.py').strpath, 'w') as f: + f.write(QCOMBOBOX_SUBCLASS) + + sys.path.insert(0, tmpdir.strpath) + + yield + + sys.path.pop(0) + + +def get_qapp(icon_path=None): + """ + Helper function to return a QApplication instance + """ + qapp = QtWidgets.QApplication.instance() + if qapp is None: + qapp = QtWidgets.QApplication(['']) + return qapp + + +@pytest.mark.skipif((PYSIDE2 and os.environ.get('CI', None) is not None), + reason="It segfaults in our CIs with PYSIDE2") +def test_load_ui(): + """ + Make sure that the patched loadUi function behaves as expected with a + simple .ui file. + """ + app = get_qapp() + ui = loadUi(os.path.join(os.path.dirname(__file__), 'test.ui')) + assert isinstance(ui.pushButton, QtWidgets.QPushButton) + assert isinstance(ui.comboBox, QComboBox) + + +@pytest.mark.skipif((PYSIDE2 and os.environ.get('CI', None) is not None), + reason="It segfaults in our CIs with PYSIDE2") +def test_load_ui_custom_auto(tmpdir): + """ + Test that we can load a .ui file with custom widgets without having to + explicitly specify a dictionary of custom widgets, even in the case of + PySide. 
+ """ + + app = get_qapp() + + with enabled_qcombobox_subclass(tmpdir): + from qcombobox_subclass import _QComboBoxSubclass + ui = loadUi(os.path.join(os.path.dirname(__file__), 'test_custom.ui')) + + assert isinstance(ui.pushButton, QtWidgets.QPushButton) + assert isinstance(ui.comboBox, _QComboBoxSubclass) + + +def test_load_full_uic(): + """Test that we load the full uic objects for PyQt5 and PyQt4.""" + QT_API = os.environ.get('QT_API', '').lower() + if QT_API.startswith('pyside'): + assert hasattr(uic, 'loadUi') + assert not hasattr(uic, 'loadUiType') + else: + objects = ['compileUi', 'compileUiDir', 'loadUi', 'loadUiType', + 'widgetPluginPath'] + assert all([hasattr(uic, o) for o in objects]) diff --git a/python3.9libs/qtpy/uic.py b/python3.9libs/qtpy/uic.py new file mode 100644 index 0000000..07d7a78 --- /dev/null +++ b/python3.9libs/qtpy/uic.py @@ -0,0 +1,228 @@ +import os + +from . import PYSIDE, PYSIDE2, PYQT4, PYQT5 +from .QtWidgets import QComboBox + + +if PYQT5: + + from PyQt5.uic import * + +elif PYQT4: + + from PyQt4.uic import * + +else: + + __all__ = ['loadUi'] + + # In PySide, loadUi does not exist, so we define it using QUiLoader, and + # then make sure we expose that function. This is adapted from qt-helpers + # which was released under a 3-clause BSD license: + # qt-helpers - a common front-end to various Qt modules + # + # Copyright (c) 2015, Chris Beaumont and Thomas Robitaille + # + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions are + # met: + # + # * Redistributions of source code must retain the above copyright + # notice, this list of conditions and the following disclaimer. + # * Redistributions in binary form must reproduce the above copyright + # notice, this list of conditions and the following disclaimer in the + # documentation and/or other materials provided with the + # distribution. 
+ # * Neither the name of the Glue project nor the names of its contributors + # may be used to endorse or promote products derived from this software + # without specific prior written permission. + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS + # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR + # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # + # Which itself was based on the solution at + # + # https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8 + # + # which was released under the MIT license: + # + # Copyright (c) 2011 Sebastian Wiesner + # Modifications by Charl Botha + # + # Permission is hereby granted, free of charge, to any person obtaining a + # copy of this software and associated documentation files (the "Software"), + # to deal in the Software without restriction, including without limitation + # the rights to use, copy, modify, merge, publish, distribute, sublicense, + # and/or sell copies of the Software, and to permit persons to whom the + # Software is furnished to do so, subject to the following conditions: + # + # The above copyright notice and this permission notice shall be included in + # all copies or substantial portions of the Software. 
+ # + # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + # DEALINGS IN THE SOFTWARE. + + if PYSIDE: + from PySide.QtCore import QMetaObject + from PySide.QtUiTools import QUiLoader + elif PYSIDE2: + from PySide2.QtCore import QMetaObject + from PySide2.QtUiTools import QUiLoader + + class UiLoader(QUiLoader): + """ + Subclass of :class:`~PySide.QtUiTools.QUiLoader` to create the user + interface in a base instance. + + Unlike :class:`~PySide.QtUiTools.QUiLoader` itself this class does not + create a new instance of the top-level widget, but creates the user + interface in an existing instance of the top-level class if needed. + + This mimics the behaviour of :func:`PyQt4.uic.loadUi`. + """ + + def __init__(self, baseinstance, customWidgets=None): + """ + Create a loader for the given ``baseinstance``. + + The user interface is created in ``baseinstance``, which must be an + instance of the top-level class in the user interface to load, or a + subclass thereof. + + ``customWidgets`` is a dictionary mapping from class name to class + object for custom widgets. Usually, this should be done by calling + registerCustomWidget on the QUiLoader, but with PySide 1.1.2 on + Ubuntu 12.04 x86_64 this causes a segfault. + + ``parent`` is the parent object of this loader. 
+ """ + + QUiLoader.__init__(self, baseinstance) + + self.baseinstance = baseinstance + + if customWidgets is None: + self.customWidgets = {} + else: + self.customWidgets = customWidgets + + def createWidget(self, class_name, parent=None, name=''): + """ + Function that is called for each widget defined in ui file, + overridden here to populate baseinstance instead. + """ + + if parent is None and self.baseinstance: + # supposed to create the top-level widget, return the base + # instance instead + return self.baseinstance + + else: + + # For some reason, Line is not in the list of available + # widgets, but works fine, so we have to special case it here. + if class_name in self.availableWidgets() or class_name == 'Line': + # create a new widget for child widgets + widget = QUiLoader.createWidget(self, class_name, parent, name) + + else: + # If not in the list of availableWidgets, must be a custom + # widget. This will raise KeyError if the user has not + # supplied the relevant class_name in the dictionary or if + # customWidgets is empty. + try: + widget = self.customWidgets[class_name](parent) + except KeyError: + raise Exception('No custom widget ' + class_name + ' ' + 'found in customWidgets') + + if self.baseinstance: + # set an attribute for the new child widget on the base + # instance, just like PyQt4.uic.loadUi does. + setattr(self.baseinstance, name, widget) + + return widget + + def _get_custom_widgets(ui_file): + """ + This function is used to parse a ui file and look for the + section, then automatically load all the custom widget classes. 
+ """ + + import sys + import importlib + from xml.etree.ElementTree import ElementTree + + # Parse the UI file + etree = ElementTree() + ui = etree.parse(ui_file) + + # Get the customwidgets section + custom_widgets = ui.find('customwidgets') + + if custom_widgets is None: + return {} + + custom_widget_classes = {} + + for custom_widget in custom_widgets.getchildren(): + + cw_class = custom_widget.find('class').text + cw_header = custom_widget.find('header').text + + module = importlib.import_module(cw_header) + + custom_widget_classes[cw_class] = getattr(module, cw_class) + + return custom_widget_classes + + def loadUi(uifile, baseinstance=None, workingDirectory=None): + """ + Dynamically load a user interface from the given ``uifile``. + + ``uifile`` is a string containing a file name of the UI file to load. + + If ``baseinstance`` is ``None``, the a new instance of the top-level + widget will be created. Otherwise, the user interface is created within + the given ``baseinstance``. In this case ``baseinstance`` must be an + instance of the top-level widget class in the UI file to load, or a + subclass thereof. In other words, if you've created a ``QMainWindow`` + interface in the designer, ``baseinstance`` must be a ``QMainWindow`` + or a subclass thereof, too. You cannot load a ``QMainWindow`` UI file + with a plain :class:`~PySide.QtGui.QWidget` as ``baseinstance``. + + :method:`~PySide.QtCore.QMetaObject.connectSlotsByName()` is called on + the created user interface, so you can implemented your slots according + to its conventions in your widget class. + + Return ``baseinstance``, if ``baseinstance`` is not ``None``. Otherwise + return the newly created instance of the user interface. 
+ """ + + # We parse the UI file and import any required custom widgets + customWidgets = _get_custom_widgets(uifile) + + loader = UiLoader(baseinstance, customWidgets) + + if workingDirectory is not None: + loader.setWorkingDirectory(workingDirectory) + + widget = loader.load(uifile) + QMetaObject.connectSlotsByName(widget) + return widget diff --git a/python3.9libs/searcher/.extra/Untitled.json b/python3.9libs/searcher/.extra/Untitled.json new file mode 100644 index 0000000..887b166 --- /dev/null +++ b/python3.9libs/searcher/.extra/Untitled.json @@ -0,0 +1,14 @@ +{ + "env": [ + { + "MODELER": "C:/Users/PATH/TO/modeler/" + }, + { + "HOUDINI_PATH" : + { + "value": "$MODELER/", + "method": "append" + } + } + ] +} \ No newline at end of file diff --git a/python3.9libs/searcher/.extra/__init__.py b/python3.9libs/searcher/.extra/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python3.9libs/searcher/.extra/bug.html b/python3.9libs/searcher/.extra/bug.html new file mode 100644 index 0000000..33c26fa --- /dev/null +++ b/python3.9libs/searcher/.extra/bug.html @@ -0,0 +1,25 @@ + + + + + + + + Demo + + + + + + + + + + \ No newline at end of file diff --git a/python3.9libs/searcher/.extra/bugsubmit.html b/python3.9libs/searcher/.extra/bugsubmit.html new file mode 100644 index 0000000..d01f5d7 --- /dev/null +++ b/python3.9libs/searcher/.extra/bugsubmit.html @@ -0,0 +1,20 @@ + + + + + + ISSUE_TITLE + + + + + + + + \ No newline at end of file diff --git a/python3.9libs/searcher/.extra/scratch b/python3.9libs/searcher/.extra/scratch new file mode 100644 index 0000000..f1527a4 --- /dev/null +++ b/python3.9libs/searcher/.extra/scratch @@ -0,0 +1,324 @@ +# C:\Users\mosthated\AppData\Roaming\Python\Python37\Scripts\pyuic5.exe .\SearcherSettings.ui -o .\SearcherSettings.py + + # panetab = None + # for pane in hou.ui.floatingPaneTabs(): + # if pane.type() == hou.paneTabType.PythonPanel: + # print(pane.activeInterface().name()) + # if pane.activeInterface().name() 
            # (scratch continuation of a commented-out pane-tab experiment)
            #     panetab = pane
            #     break

    # ret = []
    # for t in tabs:
    #     if t.type() == hou.paneTabType.PythonPanel:
    #         if t.activeInterface() == pytype:
    #             t.pane().setIsSplitMaximized(False)
    #             ret.append(t)


# else:
#     os.environ['QT_API'] = 'pyside2'
#     from PySide import QtUiTools
#     from qtpy import QtGui
#     from qtpy import QtCore
#     from qtpy import QtWidgets
# endregion


    # ------------------------------------- checkforchanges
    def checkforchanges(self):
        """Return True as soon as any settings widget differs from the saved
        settings snapshot in self.currentsettings; False when all match.

        Each key in util.SETTINGS_KEYS is compared using the widget accessor
        that matches its declared type in util.SETTINGS_TYPES:
        "bool" -> isChecked(), "text" -> text(), "intval" -> value(),
        "cbx" -> currentText().
        NOTE(review): bc() presumably coerces the stored value to bool —
        confirm against searcher.util.
        """
        for i in range(len(util.SETTINGS_KEYS)):
            if util.SETTINGS_TYPES[util.SETTINGS_KEYS[i]] == "bool":
                # Verbose debug tracing only at the "ALL" debug level.
                if self.isdebug and self.isdebug.level in {"ALL"}:
                    print("Get attribute: ", getattr(self, util.SETTINGS_KEYS[i]))
                    print("Get settings: ", bc(self.currentsettings[util.SETTINGS_KEYS[i]]))
                if getattr(self, util.SETTINGS_KEYS[i]).isChecked() != bc(self.currentsettings[util.SETTINGS_KEYS[i]]):
                    if self.isdebug and self.isdebug.level in {"ALL"}:
                        print("{} value {}".format(util.SETTINGS_KEYS[i], getattr(self, util.SETTINGS_KEYS[i]).isChecked()))
                        print("{} value {}".format(util.SETTINGS_KEYS[i], bc(self.currentsettings[util.SETTINGS_KEYS[i]])))
                    # Early exit: one difference is enough.
                    return True
            elif util.SETTINGS_TYPES[util.SETTINGS_KEYS[i]] == "text":
                if self.isdebug and self.isdebug.level in {"ALL"}:
                    print("Get attribute: ", getattr(self, util.SETTINGS_KEYS[i]))
                    print("Get settings: ", self.currentsettings[util.SETTINGS_KEYS[i]])
                if getattr(self, util.SETTINGS_KEYS[i]).text() != self.currentsettings[util.SETTINGS_KEYS[i]]:
                    if self.isdebug and self.isdebug.level in {"ALL"}:
                        print("{} value {}".format(util.SETTINGS_KEYS[i], getattr(self, util.SETTINGS_KEYS[i]).text()))
                        print("{} value {}".format(util.SETTINGS_KEYS[i], self.currentsettings[util.SETTINGS_KEYS[i]]))
                    return True
            elif util.SETTINGS_TYPES[util.SETTINGS_KEYS[i]] == "intval":
                if self.isdebug and self.isdebug.level in {"ALL"}:
                    print("Get attribute: ", getattr(self, util.SETTINGS_KEYS[i]))
                    print("Get settings: ", self.currentsettings[util.SETTINGS_KEYS[i]])
                # NOTE(review): comparison is against the raw stored value
                # but the debug print below casts with int() — the stored
                # value may be a string; confirm intended semantics.
                if getattr(self, util.SETTINGS_KEYS[i]).value() != self.currentsettings[util.SETTINGS_KEYS[i]]:
                    if self.isdebug and self.isdebug.level in {"ALL"}:
                        print("{} value {}".format(util.SETTINGS_KEYS[i], getattr(self, util.SETTINGS_KEYS[i]).value()))
                        print("{} value {}".format(util.SETTINGS_KEYS[i], int(self.currentsettings[util.SETTINGS_KEYS[i]])))
                    return True
            elif util.SETTINGS_TYPES[util.SETTINGS_KEYS[i]] == "cbx":
                if self.isdebug and self.isdebug.level in {"ALL"}:
                    print("Get attribute: ", getattr(self, util.SETTINGS_KEYS[i]))
                    print("Get settings: ", self.currentsettings[util.SETTINGS_KEYS[i]])
                if getattr(self, util.SETTINGS_KEYS[i]).currentText() != self.currentsettings[util.SETTINGS_KEYS[i]]:
                    if self.isdebug and self.isdebug.level in {"ALL"}:
                        print("{} value {}".format(util.SETTINGS_KEYS[i], getattr(self, util.SETTINGS_KEYS[i]).currentText()))
                        print("{} value {}".format(util.SETTINGS_KEYS[i], str(self.currentsettings[util.SETTINGS_KEYS[i]])))
                    return True
        return False


    def bug_cb(self, toggled):
        """Toggle the bug-report popup, anchored just below its button.

        The anchor offset differs by a couple of pixels depending on the
        "animated settings" checkbox state.
        """
        if toggled == True and not self.bugreport.isVisible():
            if self.animatedsettings.isChecked():
                pos = self.bugreportbtn.mapToGlobal(
                    QtCore.QPoint( -43, 34))
            else:
                pos = self.bugreportbtn.mapToGlobal(
                    QtCore.QPoint( -45, 35))
            self.bugreport.setGeometry(
                pos.x(),
                pos.y(),
                self.bugreport.width(),
                self.bugreport.height()
            )
            self.bugreport.show()
        else:
            self.bugreport.close()

    def theme_cb(self, toggled):
        """Toggle the theme popup, anchored just below its button.

        Same anchoring scheme as bug_cb, with theme-button offsets.
        """
        if toggled == True and not self.theme.isVisible():
            if self.animatedsettings.isChecked():
                pos = self.themebtn.mapToGlobal(
                    QtCore.QPoint( -77, 34))
            else:
                pos = self.themebtn.mapToGlobal(
                    QtCore.QPoint( -79, 35))
            self.theme.setGeometry(
                pos.x(),
                pos.y(),
                self.theme.width(),
                self.theme.height()
            )
            self.theme.show()
        else:
            self.theme.close()
self.aboutui.setAttribute(QtCore.Qt.WA_StyledBackground, True) + self.aboutui.setWindowFlags( + QtCore.Qt.Popup | + QtCore.Qt.WindowStaysOnTopHint | + QtCore.Qt.NoDropShadowWindowHint | + QtCore.Qt.WindowStaysOnTopHint + ) + self.aboutui.setParent(self.parentwindow) + self.aboutui.move(self.pos().x() - 175, self.pos().y()) + self.aboutui.show() + + +# ------------------------------------------------------- CUSTOM FONT LOADER +style script ---------- +def getfontdb(): + return getattr(hou.session, "FONTDB", None) + +def getfont(font, size = -1, weight = -1): + if font in getfontdb().families(): + fontstr = "" + fontstr += ("Using %s" % font) + if size >= 0: + fontstr += (" : Size %s" % size) + if weight >= 0: + fontstr += (" : Weight %s" % weight) + else: + if hou.isUIAvailable(): + hou.ui.setStatusMessage(("%s not available" % font), severity=hou.severityType.Message) + return QtGui.QFont() + else: + print(("%s not available" % font)) + return QtGui.QFont() + + return QtGui.QFont(font, size, weight) +------- + + # CUSTOM FONT LOADER + self.infolbl = self.sui.info_lbl + self.infolbl_font = style.getfont("JetBrains Mono", 8) + self.infolbl_font.setWeight(40) + self.infolbl_font.setLetterSpacing(QtGui.QFont.AbsoluteSpacing, -1) + self.infolbl_font.setWordSpacing(-2) + # self.infolbl.setFont(self.infolbl_font) + + self.treetotal_lbl = self.sui.treetotal_lbl + self.treetotal_lbl_font = style.getfont("JetBrains Mono", 8) + self.treetotal_lbl_font.setWeight(40) + self.treetotal_lbl_font.setLetterSpacing(QtGui.QFont.AbsoluteSpacing, -1) + self.treetotal_lbl_font.setWordSpacing(0) + # self.treetotal_lbl.setFont(self.treetotal_lbl_font) + # self.treetotal_lbl.setMinimumSize(QtCore.QSize(50, 0)) + + # ------------------------------------------------------- Spacing + # tnum = str(treeitemsnum) + # goalnum = 5 + # if len(tnum) == 3: + # itmvald = tnum.rjust((goalnum + 2) - count_chars(str(treeitemsnum)), " ") + # else: + # itmvald = tnum.rjust((goalnum) - 
count_chars(str(treeitemsnum)), " ") + # itm = itmvald.replace(" ", " ") + + # ------------------------------------------------------- Auto - Add font + jbfont = getattr(hou.session, "FONTDB", None) + if not getattr(hou.session, "FONTDB", None): + fontlocation = os.path.join(script_path, "fonts") + jbfontfolder = "JetBrainsMono-1.0.0" + f = [] + for (dirpath, dirnames, filenames) in os.walk(os.path.join(fontlocation, jbfontfolder)): + f.extend(filenames) + break + if f: + for font in f: + fontdb = QtGui.QFontDatabase() + fontdb.addApplicationFont(os.path.join(fontlocation, jbfontfolder, font)) + print("Added %s" % font) + hou.session.FONTDB = fontdb + + + + # # ----------------------------------------- chooseColor + # # NOTE chooseColor ------------------------------------ + # def chooseColor(self): + # sender = self.sender() + # name = sender.objectName() + # colorfield = getattr(self.ui, name) + + # qcolor = QtGui.QColor() + # qcolor.setNamedColor(colorfield.text()) + + # color = hou.Color() + # color.setRGB(( + # qcolor.redF(), + # qcolor.greenF(), + # qcolor.blueF()) + # ) + + # result = hou.ui.selectColor(initial_color = color) + # allWidgets = QtWidgets.QApplication.allWidgets() + # for w in allWidgets: + # if "Select Color" in w.windowTitle(): + # pos = self.parent.mapToGlobal( + # QtCore.QPoint(-self.parent.width(), -self.parent.height())) + # w.setGeometry( + # pos.x(), + # pos.y(), + # w.width(), + # w.height() + # ) + # # w.move(self.parent.width()200, 100) + + # if result: + # rgb = result.rgb() + # newcolor = QtGui.QColor( + # rgb[0]*255, + # rgb[1]*255, + # rgb[2]*255 + # ) + + # if newcolor.isValid(): + # colorfield.setText(newcolor.name()) + # sender.setStyleSheet("background-color:" + colorfield.text()) + + + + + + # ----------------------------------------- chooseColor + # NOTE chooseColor ------------------------------------ + def chooseColor(self): + sender = self.sender() + + self.name = sender.objectName() + self.colorfield[self.name] = 
(getattr(self.ui, self.name), sender) + + qcolor = QtGui.QColor() + qcolor.setNamedColor(self.colorfield[self.name][0].text()) + + # color = hou.Color() + # color = QtGui.QColor() + # color.setRGB(( + # qcolor.redF(), + # qcolor.greenF(), + # qcolor.blueF()) + # ) + + # hou.ui.openColorEditor(self.colorchange_cb, include_alpha=False, initial_color = color) + # hd.executeDeferred(self._opencoloreditor, color) + colord = QtWidgets.QColorDialog(self) + colord.setModal(False) + pos = self.parentwindow.mapToGlobal( + QtCore.QPoint(self.parentwindow.width(), self.parentwindow.height())) + colord.move( + pos.x() + 300, + pos.y(), + ) + colord.getColor( + initial=qcolor, + parent=self, + options=QtWidgets.QColorDialog.DontUseNativeDialog + ) + # colord.setWindowFlags( + # QtCore.Qt.CustomizeWindowHint + # # | QtCore.Qt.WindowStaysOnTopHint + # # | QtCore.Qt.X11BypassWindowManagerHint + # ) + # self.activateWindow() + # self._opencoloreditor(color) + + + + # NOTE PANES --------------------------- + # hou.playbar.moveToBottom() + + # for ii in i: + # print(ii) + # print("Under mouse: ", i.windowTitle()) + print("--------------------------------------") + current_desktop = hou.ui.curDesktop() + # allpanes = current_desktop.panes() + # if allpanes: + # try: + # self.processdesktop("current_desktop.panes()", allpanes) + # except: + # pass + # panetabsd = current_desktop.paneTabs() + # panetabs = hou.ui.paneTabs() + # if panetabs: + # try: + # self.processdesktop("current_desktop.paneTabs()", panetabsd) + # self.processdesktop("hou.ui.paneTabs()", panetabs) # ------ Good one + # except: + # pass + # floating = hou.ui.floatingPanels() + # if floating: + # try: + # self.processdesktop("current_desktop.floatingPanels()", floating) + # print(floating.name()) + # except: + # pass + # desktoppane = current_desktop.paneUnderCursor() + # if desktoppane: + # try: + # self.processdesktop("current_desktop.paneUnderCursor()", desktoppane) + # except: + # pass + # desktoptab = 
import os
import time

# attempt to import hou module. if it fails, not in a houdini session.
try:
    import hou
except ImportError:
    HOU_IMPORTED = False
else:
    HOU_IMPORTED = True

# -----------------------------------------------------------------------------
# attempt to import hou ui module. if it fails, not in the UI.
# BUG FIX: this was a bare `except:`, which also swallows SystemExit and
# KeyboardInterrupt; only an import failure means "no UI available".
try:
    from PySide import QtCore, QtGui
except ImportError:
    HOU_UI_IMPORTED = False
else:
    HOU_UI_IMPORTED = True

from dpa.app.session import RemoteMixin, Session, SessionRegistry, SessionError

# -----------------------------------------------------------------------------
class HoudiniSession(RemoteMixin, Session):
    """A Houdini application session, either in-process or remote."""

    app_name = 'houdini'

    # XXX should come from config
    SERVER_EXECUTABLE = "/home/jtomlin/dev/dpa-pipe/bin/dpa_houdini_server"

    # -------------------------------------------------------------------------
    @classmethod
    def current(cls):
        """Return the current in-process session, or None outside Houdini."""
        if not HOU_IMPORTED:
            return None
        return cls()

    # -------------------------------------------------------------------------
    def __init__(self, filepath=None, remote=False):
        """Create a session; when *filepath* is given, open it immediately."""
        super(HoudiniSession, self).__init__(remote=remote)

        # Local module or remote RPC proxy, depending on `remote`.
        self._hou = self.init_module('hou')

        if filepath:
            self.open_file(filepath)

    # -------------------------------------------------------------------------
    def close(self):
        """Shut down a remote session, or clear the local hip file."""
        if self.remote_connection:
            self.shutdown()
        else:
            self.hou.hipFile.clear()

    # -------------------------------------------------------------------------
    def open_file(self, filepath):
        """Load *filepath* into the session.

        Raises SessionError when the file does not exist or fails to load.
        """
        if not os.path.exists(filepath):
            raise SessionError(
                "Can not open '{f}'. File does not exist.".format(f=filepath))

        try:
            self.hou.hipFile.load(filepath)
        except RuntimeError as e:
            # Re-raise Houdini's load failure as the pipeline's error type.
            raise SessionError(str(e))

    # -------------------------------------------------------------------------
    def save(self, filepath=None, overwrite=False):
        """Save the hip file, refusing to clobber unless *overwrite* is set."""
        if filepath and os.path.exists(filepath) and not overwrite:
            raise SessionError(
                "Can not save '{f}'. File exists.".format(f=filepath))

        self.hou.hipFile.save(file_name=filepath)

    # -------------------------------------------------------------------------
    @property
    def hou(self):
        # The hou module (or its remote proxy) backing this session.
        return self._hou

    # -------------------------------------------------------------------------
    @property
    def in_session(self):
        """Returns True if inside a current app session."""
        return HOU_IMPORTED or self.remote_connection

    # -------------------------------------------------------------------------
    @property
    def main_window(self):
        """The active Qt window, or None when no UI is available."""
        if not HOU_UI_IMPORTED:
            return None

        return QtGui.QApplication.activeWindow()

    # -------------------------------------------------------------------------
    @property
    def name(self):
        """Returns the name of the application."""
        return "houdini"

    # -------------------------------------------------------------------------
    @property
    def server_executable(self):
        # Path to the remote session server binary (see XXX above).
        return self.__class__.SERVER_EXECUTABLE


# -----------------------------------------------------------------------------
SessionRegistry().register(HoudiniSession)
def is_shelf_created(shelf_name, shelf_tab="shelf_set_1"):
    """Return True when *shelf_name* is already attached to *shelf_tab*."""
    try:
        tab = hou.shelves.shelfSets()[shelf_tab]
    except KeyError:
        print("Key not Found! for shelf.")
        return False
    return shelf_name in tab.shelves()


def create_shelf_under_tab(shelf_name, shelf_tab="shelf_set_1"):
    """Create (or fetch) the named shelf and append it to *shelf_tab*.

    Returns the shelf object, or 0 when the shelf set does not exist.
    """
    new_shelf = hou.shelves.shelves().get(shelf_name) \
        or hou.shelves.newShelf(name=shelf_name, label=shelf_name)
    try:
        tab = hou.shelves.shelfSets()[shelf_tab]
    except KeyError:
        print("Key not Found! for shelf.")
        return 0
    tab.setShelves(tab.shelves() + (new_shelf, ))
    return new_shelf


def create_tool_under_shelf(shelf_obj, tool_name):
    """Reset *shelf_obj*'s tools and attach a (possibly new) named tool."""
    # Clear the existing tools
    shelf_obj.setTools(())

    tool = hou.shelves.tools().get(tool_name) \
        or hou.shelves.newTool(name=tool_name)

    # Set up the tool.
    tool.setLabel(tool_name)
    # tool.setScript('from houtools.shelf import dispatch; dispatch(%r)' % spec['entrypoint'])
    shelf_obj.setTools(shelf_obj.tools() + (tool, ))


def run_shelf_creation():
    """Entry point: build the BpCustom shelf (and its tool) exactly once."""
    print("Trying to create the shelf.")
    if is_shelf_created("BpCustom"):
        print("Not running shelf creation as its present.")
        return 1
    shelf_obj = create_shelf_under_tab("BpCustom")
    create_tool_under_shelf(shelf_obj, "BpTool1")
self.clicked.connect(self.display_help) + help_button_size = hou.ui.scaledSize(size) + self.setProperty("flat", True) + self.setIcon(hou.qt.createIcon(util.get_path(["images", "help1.png"]))) + self.setIconSize(QtCore.QSize( + help_button_size, + help_button_size + )) + + def display_help(self): + """Display help panel.""" + # Look for an existing, float help browser. + for pane_tab in hou.ui.paneTabs(): + if isinstance(pane_tab, hou.HelpBrowser): + if pane_tab.isFloating(): + browser = pane_tab + break + + # Didn't find one, so create a new floating browser. + else: + desktop = hou.ui.curDesktop() + + posx = self.parentWindow.pos().x() + posy = self.parentWindow.pos().y() + sizew = self.parentWindow.width() + sizeh = self.parentWindow.height() + + browser = desktop.createFloatingPaneTab( + hou.paneTabType.HelpBrowser, position=(posx + sizew / 8, posy - (sizeh / 2)), size=(805, 650)) + self.parentWindow.close() + + browser.displayHelpPath("/searcher/{}".format(self._name)) diff --git a/python3.9libs/searcher/__init__.py b/python3.9libs/searcher/__init__.py new file mode 100644 index 0000000..66960fc --- /dev/null +++ b/python3.9libs/searcher/__init__.py @@ -0,0 +1,7 @@ +__package__ = "searcher" +__author__ = "instance.id" +__copyright__ = "2020 All rights reserved. See LICENSE for more details." +__status__ = "Prototype" + +from . 
# noinspection PyCallByClass,PyUnresolvedReferences
class About(QtWidgets.QWidget):
    """ Searcher Settings and Debug Menu"""

    def __init__(self, parent=None):
        super(About, self).__init__(parent=parent)
        self.setParent(parent)
        self.parentwindow = parent
        self.ui = about_ui.Ui_About()
        self.ui.setupUi(self)
        self.ui.retranslateUi(self)

        # (button, svg icon file, click handler) for each contact/link row;
        # every button gets the same flat style and 16px scaled icon.
        icon_rows = (
            (self.ui.web_icon, "firefox-browser-brands.svg", self.openWeb),
            (self.ui.github_icon, "github-brands.svg", self.openGithub),
            (self.ui.twitter_icon, "twitter-brands.svg", self.openTwitter),
            (self.ui.email_icon, "at-solid.svg", self.openEmail),
        )
        icon_size = hou.ui.scaledSize(16)
        for button, icon_file, handler in icon_rows:
            button.setIcon(hou.qt.createIcon(
                util.get_path(["images", "icons", icon_file])))
            button.clicked.connect(handler)
            button.setProperty("flat", True)
            button.setIconSize(QtCore.QSize(icon_size, icon_size))

        # Clicking the text labels behaves like clicking the icons.
        self.ui.web.mousePressEvent = self.openWeb
        self.ui.github.mousePressEvent = self.openGithub
        self.ui.twitter.mousePressEvent = self.openTwitter
        self.ui.email.mousePressEvent = self.openEmail

        self.installEventFilter(self)

    def initmenu(self):
        return

    def openWeb(self, _):
        """Open the instance.id website and dismiss the settings window."""
        QtGui.QDesktopServices.openUrl(QtCore.QUrl('https://instance.id/'))
        self.parentwindow.parentwindow.close()

    def openGithub(self, _):
        """Open the GitHub profile and dismiss the settings window."""
        QtGui.QDesktopServices.openUrl(
            QtCore.QUrl('https://github.com/instance-id/'))
        self.parentwindow.parentwindow.close()

    def openTwitter(self, _):
        """Open the Twitter profile and dismiss the settings window."""
        QtGui.QDesktopServices.openUrl(
            QtCore.QUrl('https://twitter.com/instance_id'))
        self.parentwindow.parentwindow.close()

    def openEmail(self, _):
        """Open a mailto: link and dismiss the settings window."""
        QtGui.QDesktopServices.openUrl(
            QtCore.QUrl('mailto:support@instance.id'))
        self.parentwindow.parentwindow.close()

    # ------------------------------------------------------------- Events
    # SECTION Events -----------------------------------------------------
    def eventFilter(self, obj, event):
        """Close the owning window chain when Escape is pressed."""
        if (event.type() == QtCore.QEvent.KeyPress
                and event.key() == QtCore.Qt.Key_Escape):
            self.parentwindow.closeroutine()

        return QtCore.QObject.eventFilter(self, obj, event)
# -*- coding: utf-8 -*-
import os

# Binding selection: when running inside Houdini (HFS env var set) use the
# bundled hutil.Qt shim; otherwise fall back to qtpy.
# NOTE(review): os.environ["HFS"] raises KeyError when HFS is entirely unset;
# presumably this module is only imported inside Houdini sessions — confirm.
hver = 0
if os.environ["HFS"] != "":
    ver = os.environ["HFS"]
    # hver = int(ver[ver.rindex('.')+1:])
    from hutil.Qt import QtGui
    from hutil.Qt import QtCore
    from hutil.Qt import QtWidgets
else:
    from qtpy import QtGui
    from qtpy import QtCore
    from qtpy import QtWidgets

# Directory of this module; used below to locate the bundled logo image.
scriptpath = os.path.dirname(os.path.realpath(__file__))


# noinspection PyAttributeOutsideInit,DuplicatedCode,PyPep8Naming
class Ui_About(object):
    """Qt-Designer-style UI builder for the Searcher "About" dialog.

    ``setupUi`` creates the widget hierarchy on the supplied container
    widget; ``retranslateUi`` assigns all user-visible strings. Widget
    attributes are intentionally created outside ``__init__`` (generated-code
    convention).
    """

    def setupUi(self, About):
        """Build the About dialog's widgets and layouts onto ``About``.

        ``About`` is the container QWidget the generated UI attaches to.
        """
        About.setObjectName("About")
        About.setWindowModality(QtCore.Qt.NonModal)
        About.resize(185, 251)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(About.sizePolicy().hasHeightForWidth())
        About.setSizePolicy(sizePolicy)
        About.setMinimumSize(QtCore.QSize(100, 0))
        About.setBaseSize(QtCore.QSize(0, 0))
        About.setStyleSheet("")

        # ------------------------------------------------- gridsetup
        # NOTE gridsetup --------------------------------------------
        # Outer grid: column 0 holds the link rows, column 1 the logo.
        self.gridLayout = QtWidgets.QGridLayout(About)
        self.gridLayout.setContentsMargins(-1, -1, -1, 6)
        self.gridLayout.setSpacing(6)
        self.gridLayout.setObjectName("gridLayout")
        self.verticalLayout_4 = QtWidgets.QVBoxLayout()
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.verticalLayout = QtWidgets.QVBoxLayout()
        self.verticalLayout.setObjectName("verticalLayout")
        # Expanding spacer pushes the link rows toward the bottom.
        spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem)

        # ------------------------------------------------- secondrow
        # NOTE Second Row ------------------------------------------- website link row
        self.secondrow = QtWidgets.QHBoxLayout()
        self.secondrow.setObjectName("secondrow")
        self.web_icon = QtWidgets.QToolButton(About)
        self.web_icon.setObjectName("web_icon")
        self.web = QtWidgets.QLabel(About)
        self.web.setObjectName("web")
        self.secondrow.addWidget(self.web_icon)
        self.secondrow.addWidget(self.web)
        self.verticalLayout.addLayout(self.secondrow)

        # -------------------------------------------------- thirdrow
        # NOTE Third Row -------------------------------------------- github link row
        self.thirdrow = QtWidgets.QHBoxLayout()
        self.thirdrow.setObjectName("thirdrow")
        self.github_icon = QtWidgets.QToolButton(About)
        self.github_icon.setObjectName("github_icon")
        self.github = QtWidgets.QLabel(About)
        self.github.setObjectName("github")
        self.thirdrow.addWidget(self.github_icon)
        self.thirdrow.addWidget(self.github)
        self.verticalLayout.addLayout(self.thirdrow)

        # ------------------------------------------------- fourthrow
        # NOTE fourthrow -------------------------------------------- twitter link row
        self.fourthrow = QtWidgets.QHBoxLayout()
        self.fourthrow.setObjectName("fourthrow")
        self.twitter_icon = QtWidgets.QToolButton(About)
        self.twitter_icon.setObjectName("twitter_icon")
        self.twitter = QtWidgets.QLabel(About)
        self.twitter.setObjectName("twitter")
        self.fourthrow.addWidget(self.twitter_icon)
        self.fourthrow.addWidget(self.twitter)
        self.verticalLayout.addLayout(self.fourthrow)

        # ------------------------------------------------- fifthrow
        # NOTE fifthrow -------------------------------------------- email link row
        self.fifthrow = QtWidgets.QHBoxLayout()
        self.fifthrow.setObjectName("fifthrow")
        self.email_icon = QtWidgets.QToolButton(About)
        self.email_icon.setObjectName("email_icon")
        self.email = QtWidgets.QLabel(About)
        self.email.setObjectName("email")
        self.fifthrow.addWidget(self.email_icon)
        self.fifthrow.addWidget(self.email)
        self.verticalLayout.addLayout(self.fifthrow)

        # ----------------------------------------------- columnsetup
        # NOTE columnsetup ------------------------------------------
        self.horizontalLayout.addLayout(self.verticalLayout)
        self.verticalLayout_4.addLayout(self.horizontalLayout)
        self.gridLayout.addLayout(self.verticalLayout_4, 0, 0, 1, 1)

        # ----------------------------------------------------- image
        # NOTE image --- -------------------------------------------- fixed-size scaled logo
        self.logo = QtWidgets.QLabel(About)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.logo.sizePolicy().hasHeightForWidth())
        self.logo.setSizePolicy(sizePolicy)
        self.logo.setMaximumSize(QtCore.QSize(120, 120))
        self.logo.setText("")
        self.logo.setPixmap(QtGui.QPixmap(scriptpath + "/images/logo.png"))
        self.logo.setScaledContents(True)
        self.logo.setObjectName("logo")
        self.gridLayout.addWidget(self.logo, 0, 1, 1, 1)

        self.retranslateUi(About)
        QtCore.QMetaObject.connectSlotsByName(About)

    def retranslateUi(self, About):
        """Assign all user-visible (translatable) strings for the dialog."""
        _translate = QtCore.QCoreApplication.translate
        About.setWindowTitle(_translate("About", "Form"))
        self.web.setText(_translate("About", 'website'))
        self.github.setText(_translate("About", 'github'))
        self.twitter.setText(_translate("About", 'twitter'))
        self.email.setText(_translate("About", 'email'))

# class LinkLabel(QtWidgets.QLabel):
#     def __init__(self, parent, text):
#         super(LinkLabel, self).__init__(parent)

#         self.setText(text)
#         self.setTextFormat(QtCore.Qt.RichText)
#         self.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction)
#         self.setOpenExternalLinks(True)
class Animator(QtWidgets.QWidget):
    """Collapsible container that animates its own height open/closed.

    A QParallelAnimationGroup drives three property animations: the widget's
    ``minimumHeight`` and ``maximumHeight``, plus the inner scroll area's
    ``maximumHeight``. Content is installed via :meth:`setContentLayout`,
    which also computes the collapsed/expanded ranges.

    Parameters:
        parent: optional parent widget.
        close_cb: optional callable connected to the animation group's
            ``finished`` signal (callers use it to close/clean up).
        animationDuration: duration of each height animation, in ms.
    """

    def __init__(self, parent=None, close_cb=None, animationDuration=200):
        super(Animator, self).__init__(parent)

        self.animationDuration = animationDuration

        self.toggleAnimation = QtCore.QParallelAnimationGroup()
        if close_cb is not None:
            self.toggleAnimation.finished.connect(close_cb)

        self.contentArea = QtWidgets.QScrollArea(
            maximumHeight=0, minimumHeight=0, minimumWidth=500)
        # BUG FIX: the original stylesheet read "rgba(58 58, 58, 1)" (missing
        # comma), which is invalid CSS and made Qt silently drop the whole
        # rule, so the intended dark background was never applied.
        self.contentArea.setStyleSheet(
            "QScrollArea { background-color: rgba(58, 58, 58, 1); border: none;}")
        self.contentArea.setSizePolicy(
            QtWidgets.QSizePolicy.Expanding,
            QtWidgets.QSizePolicy.Fixed)

        # Order matters: the first two animations drive this widget's own
        # height range; the LAST one drives the content area and is configured
        # separately in setContentLayout().
        toggleAnimation = self.toggleAnimation
        toggleAnimation.addAnimation(
            QtCore.QPropertyAnimation(self, b"minimumHeight"))
        toggleAnimation.addAnimation(
            QtCore.QPropertyAnimation(self, b"maximumHeight"))
        toggleAnimation.addAnimation(QtCore.QPropertyAnimation(
            self.contentArea, b"maximumHeight"))

        mainLayout = QtWidgets.QVBoxLayout(self)
        mainLayout.setSpacing(0)
        mainLayout.setContentsMargins(0, 0, 0, 0)
        mainLayout.addWidget(self.contentArea)

    def start_animation(self, checked):
        """Play the expand (``checked``) or collapse (not ``checked``) animation."""
        direction = QtCore.QAbstractAnimation.Forward if checked else QtCore.QAbstractAnimation.Backward
        self.toggleAnimation.setDirection(direction)
        self.toggleAnimation.start()

    def setContentLayout(self, contentLayout):
        """Install ``contentLayout`` in the scroll area and compute animation ranges.

        May be called more than once; any previously installed layout is
        detached and discarded first.
        """
        # BUG FIX: the original did `lay = self.contentArea.layout(); del lay`,
        # which only dropped a Python reference — the C++ layout stayed
        # installed, so a second setLayout() call would warn and be ignored.
        # Reparenting the old layout onto a throwaway widget actually removes
        # it (standard Qt idiom for layout replacement).
        old_layout = self.contentArea.layout()
        if old_layout is not None:
            QtWidgets.QWidget().setLayout(old_layout)
        self.contentArea.setLayout(contentLayout)
        collapsedHeight = self.sizeHint().height() - self.contentArea.maximumHeight()

        contentHeight = contentLayout.sizeHint().height()
        # Every animation except the last animates this widget between
        # collapsed and collapsed+content heights.
        for i in range(self.toggleAnimation.animationCount() - 1):
            expandAnimation = self.toggleAnimation.animationAt(i)
            expandAnimation.setDuration(self.animationDuration)
            expandAnimation.setStartValue(collapsedHeight)
            expandAnimation.setEndValue(collapsedHeight + contentHeight)

        # The last animation grows the scroll area itself from 0 to the
        # content's preferred height.
        contentAnimation = self.toggleAnimation.animationAt(
            self.toggleAnimation.animationCount() - 1)
        contentAnimation.setDuration(self.animationDuration)
        contentAnimation.setStartValue(0)
        contentAnimation.setEndValue(contentHeight)
self.resize(self.width(), self.parentwindow.height() - 300) + self._webview = None + self.ui.title.setText("") + self.ui.edittitle_btn.pressed.connect(self.doweb) + + self.ui.title.setFocus() + + def doweb(self): + if self.ui.title.text() == "": + self.parentwindow.parentwindow.setstatusmsg("Please enter a title for your bug report", "ImportantMessage") + if hou.isUIAvailable(): + hou.ui.setStatusMessage( + "Please enter a title for your bug report.", severity=hou.severityType.Warning) + return + + submittype = submittypeswitch(self.ui.label_cbox.currentIndex()) + reporturl = '''https://github.com/instance-id/searcher_addon/issues/new?%s%s''' % (submittype, self.ui.title.text()) + + QtGui.QDesktopServices.openUrl(QtCore.QUrl(reporturl)) + self.parentwindow.parentwindow.close() + + # ------------------------------------------------------------- Events + # SECTION Events ----------------------------------------------------- + def eventFilter(self, obj, event): + event_type = event.type() + + # ---------------------------------------- Keypress + # NOTE Keypress ----------------------------------- + if event_type == QtCore.QEvent.KeyPress: + self.priortext = self.ui.title.text() + if event.key() == QtCore.Qt.Key_Escape: + self.parentwindow.closeroutine() + return True + + if event_type == QtCore.QEvent.Close: + self._webview = None + self.isediting = True + + return QtCore.QObject.eventFilter(self, obj, event) diff --git a/python3.9libs/searcher/bugreport_ui.py b/python3.9libs/searcher/bugreport_ui.py new file mode 100644 index 0000000..c2ec21a --- /dev/null +++ b/python3.9libs/searcher/bugreport_ui.py @@ -0,0 +1,69 @@ +from hutil.Qt import QtCore, QtGui, QtWidgets +import os + +scriptpath = os.path.dirname(os.path.realpath(__file__)) + + +class Ui_BugReport(object): + def setupUi(self, BugReport): + BugReport.setObjectName("BugReport") + BugReport.setWindowModality(QtCore.Qt.NonModal) + BugReport.resize(450, 20) + sizePolicy = 
        # Dialog-level size policy / sizing (generated-code boilerplate).
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(BugReport.sizePolicy().hasHeightForWidth())
        BugReport.setSizePolicy(sizePolicy)
        BugReport.setMinimumSize(QtCore.QSize(25, 0))
        BugReport.setBaseSize(QtCore.QSize(0, 0))
        BugReport.setContentsMargins(-1, -1, -1, -1)
        BugReport.setStyleSheet("")
        self.gridLayout = QtWidgets.QGridLayout(BugReport)
        self.gridLayout.setContentsMargins(-1, -1, -1, -1)
        self.gridLayout.setSpacing(0)
        self.gridLayout.setObjectName("gridLayout")
        self.verticalLayout_4 = QtWidgets.QVBoxLayout()
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        # Row 1: the issue-title line edit.
        self.secondrow = QtWidgets.QHBoxLayout()
        self.secondrow.setObjectName("secondrow")
        self.title = QtWidgets.QLineEdit(BugReport)
        self.title.setMinimumSize(QtCore.QSize(175, 0))
        self.title.setObjectName("title")
        self.secondrow.addWidget(self.title)
        self.verticalLayout_4.addLayout(self.secondrow)
        # Row 2: right-aligned report-type combo box + Create button.
        self.thirdrow = QtWidgets.QHBoxLayout()
        self.thirdrow.setObjectName("thirdrow")
        spacerItem1 = QtWidgets.QSpacerItem(40, 25, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
        self.thirdrow.addItem(spacerItem1)
        # Combo index order matters: BugReport.doweb maps index 0/1/2 to the
        # bug / enhancement / question issue templates.
        self.label_cbox = QtWidgets.QComboBox(BugReport)
        self.label_cbox.setObjectName("label_cbox")
        self.label_cbox.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.label_cbox.setMaximumSize(QtCore.QSize(100, 25))
        self.label_cbox.addItem("Bug")
        self.label_cbox.addItem("Suggestion")
        self.label_cbox.addItem("Question")
        self.thirdrow.addWidget(self.label_cbox)
        spacerItem1 = QtWidgets.QSpacerItem(5, 30, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        self.thirdrow.addItem(spacerItem1)
        self.edittitle_btn = QtWidgets.QPushButton(BugReport)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.edittitle_btn.sizePolicy().hasHeightForWidth())
        self.edittitle_btn.setSizePolicy(sizePolicy)
        self.edittitle_btn.setMaximumSize(QtCore.QSize(75, 16777215))
        # NOTE(review): object name "edittitle" does not match the attribute
        # name "edittitle_btn" — harmless unless connectSlotsByName auto-wiring
        # is expected to find slots named on_edittitle_btn_*; confirm intent.
        self.edittitle_btn.setObjectName("edittitle")
        self.thirdrow.addWidget(self.edittitle_btn)

        self.verticalLayout_4.addLayout(self.thirdrow)
        self.gridLayout.addLayout(self.verticalLayout_4, 0, 0, 1, 1)

        self.retranslateUi(BugReport)
        QtCore.QMetaObject.connectSlotsByName(BugReport)

    def retranslateUi(self, BugReport):
        """Assign all user-visible (translatable) strings for the form."""
        _translate = QtCore.QCoreApplication.translate
        BugReport.setWindowTitle(_translate("BugReport", "Form"))
        self.edittitle_btn.setText(_translate("BugReport", "Create"))
        self.title.setPlaceholderText(_translate("BugReport", "Please enter descriptive bug report title:"))
        # self.continue_btn.setText(_translate("BugReport", "Continue"))
+ +""" + QtWidgets.QWidget.__init__(self) + + layout = QtWidgets.QHBoxLayout() + layout.setSpacing(hou.ui.scaledSize(2)) + layout.setContentsMargins(0, 0, 0, 0) + + self.colorSwatchButton = hou.qt.ColorSwatchButton(include_alpha) + + # Use the color swatch button's colorChanged signal as our own. + self.colorChanged = self.colorSwatchButton.colorChanged + + self.inputField = hou.qt.InputField( + hou.qt.InputField.FloatType, + 4 if include_alpha else 3) + + if label is not None and label != "": + layout.addWidget(hou.qt.FieldLabel(label)) + + layout.addWidget(self.colorSwatchButton) + layout.addSpacing(hou.ui.scaledSize(5)) + layout.addWidget(self.inputField) + + # Connect color swatch button to field so their values + # are always in-sync. + self.colorSwatchButton.colorChanged.connect( + self._updateFieldFromColorSwatch) + self.inputField.valueChanged.connect( + self._updateColorSwatchFromField) + + # Sync input field with color swatch. + self._updateFieldFromColorSwatch(self.colorSwatchButton.color()) + + self.setLayout(layout) + + def color(self): + """ +color() -> QtGui.QColor + + Return the field's current color. + +""" + return self.colorSwatchButton.color() + + def setColor(self, color): + """ +setColor(color) + + Set the field's current color. color must be a QtGui.QColor object. + +""" + self.colorSwatchButton.setColor(color) + + # Update the input field with the new color. 
+ self._updateFieldFromColorSwatch(color) + + def _updateFieldFromColorSwatch(self, color): + if self.colorSwatchButton.hasAlpha(): + self.inputField.setValues([ + color.redF(), color.greenF(), color.blueF(), color.alphaF()]) + else: + self.inputField.setValues([ + color.redF(), color.greenF(), color.blueF()]) + + def _updateColorSwatchFromField(self): + values = list(self.inputField.values()) + + color = QtGui.QColor() + color.setRedF(self._clampRGBValue(values[0])) + color.setGreenF(self._clampRGBValue(values[1])) + color.setBlueF(self._clampRGBValue(values[2])) + + if self.colorSwatchButton.hasAlpha(): + color.setAlphaF(self._clampRGBValue(values[3])) + + self.colorSwatchButton.setColor(color) + + def _clampRGBValue(self, val): + if val > 1.0: + return 1.0 + + if val < 0.0: + return 0.0 + + return val + diff --git a/python3.9libs/searcher/database.py b/python3.9libs/searcher/database.py new file mode 100644 index 0000000..b4b306b --- /dev/null +++ b/python3.9libs/searcher/database.py @@ -0,0 +1,437 @@ +from __future__ import print_function +from __future__ import absolute_import +import weakref + +import hou +import os + +from . import util +from . import settings_data +from . 
import ptime as ptime + +from peewee import * +from playhouse.sqlite_ext import SqliteExtDatabase, RowIDField, FTS5Model, SearchField +import time + +# --------------------------------------------- hou.session +# NOTE hou.session ---------------------------------------- +def get_settings(): + return getattr(hou.session, "SETTINGS", None) + +def get_dbconnection(): + return getattr(hou.session, "DBCONNECTION", None) + + +scriptpath = os.path.dirname(os.path.realpath(__file__)) +db = get_dbconnection() + +# --------------------------------------------------------- DatabaseModels +# SECTION DatabaseModels ------------------------------------------------- +# ------------------------------------------------ Settings +# NOTE Settings ------------------------------------------- +class Settings(Model): + id = IntegerField(unique=True) + indexvalue = IntegerField() + defaulthotkey = TextField() + searchdescription = IntegerField() + searchprefix = IntegerField() + searchcurrentcontext = IntegerField() + lastused = TextField() + + class Meta: + table_name = 'settings' + database = db + +# ------------------------------------------------ HContext +# NOTE HContext ------------------------------------------- +class HContext(Model): + id = AutoField() + context = TextField(unique=True) + title = TextField() + description = TextField() + + class Meta: + table_name = 'hcontext' + database = db + +# # # ------------------------------------------- HContextIndex +# # # NOTE HContextIndex -------------------------------------- +# class HContextIndex(FTS5Model): +# # rowid = RowIDField() +# context = SearchField() +# title = SearchField() +# description = SearchField() + +# class Meta: +# database = db +# options = {'prefix': [2, 3], 'tokenize': 'porter'} + +# ------------------------------------------------- Hotkeys +# NOTE Hotkeys -------------------------------------------- +class Hotkeys(Model): + hotkey_symbol = CharField(unique=True) + label = CharField() + description = 
TextField() + assignments = TextField() + context = TextField() + + class Meta: + table_name = 'hotkeys' + database = db + +# -------------------------------------------- HotkeysIndex +# NOTE HotkeysIndex --------------------------------------- +class HotkeysIndex(FTS5Model): + # rowid = RowIDField() + hotkey_symbol = SearchField(unindexed=True) + label = SearchField() + description = SearchField() + assignments = SearchField(unindexed=True) + context = SearchField(unindexed=True) + + def clear_index(self): + HotkeysIndex.delete().where(HotkeysIndex.rowid == self.id).execute() + + class Meta: + # table_name = 'hotkeysindex' + database = db + options = {'prefix': [2, 3], 'tokenize': 'porter'} +# !SECTION + +# -------------------------------------------------------------- Functions +# SECTION Functions ------------------------------------------------------ +# ----------------------------------------------- py_unique +# NOTE py_unique ------------------------------------------ +def py_unique(data): + return list(set(data)) + +# ------------------------------------------------- getdata +# NOTE getdata -------------------------------------------- +def getdata(): + rval = [] + contextdata = [] + hotkeydata = [] + + def getcontexts(r, context_symbol, root): + keys = None + branches = hou.hotkeys.contextsInContext(context_symbol) + for branch in branches: + branch_path = "%s/%s" % (r, branch['label']) + contextdata.append( + {'context': branch['symbol'], + 'title': branch['label'], + 'description': branch['help']} + ) + commands = hou.hotkeys.commandsInContext(branch['symbol']) + for command in commands: + keys = hou.hotkeys.assignments(command['symbol']) + ctx = command['symbol'].rsplit('.', 1) + hotkeydata.append( + {'hotkey_symbol': command['symbol'], + 'label': command['label'], + 'description': command['help'], + 'assignments': " ".join(keys), + 'context': ctx[0]} + ) + getcontexts(branch_path, branch['symbol'], root) + + getcontexts("", "", rval) + return 
# ----------------------------------------------------------- Database
# SECTION Database ---------------------------------------------------
class Databases(object):
    """Owns the Searcher SQLite database: connection, schema, queries, updates.

    The connection lives on hou.session (DBCONNECTION) so it is shared across
    reloads; first construction initializes the peewee proxy, creates tables
    and seeds the settings row via :meth:`initialsetup`.
    """

    def __init__(self):

        self.settings = get_settings()
        self.isdebug = util.bc(self.settings[util.SETTINGS_KEYS[4]])
        inmemory = util.bc(self.settings[util.SETTINGS_KEYS[0]])
        if inmemory:
            val = ':memory:'
        else:
            val = (self.settings[util.SETTINGS_KEYS[1]])

        self.db = db
        if not self.db:
            # BUG FIX: the proxy was stored on hou.session but self.db was
            # left as None, so the following initialize() call raised
            # AttributeError. Keep a reference to the proxy we just made.
            hou.session.DBCONNECTION = DatabaseProxy()
            self.db = hou.session.DBCONNECTION
            self.db.initialize(
                SqliteExtDatabase(
                    val,
                    pragmas=(
                        ("cache_size", -1024 * 64),
                        ("journal_mode", "off"),
                        ("temp_store", "memory"),
                        ("synchronous", 0)
                    )))

        # BUG FIX: the cursor must exist before initialsetup() runs — the
        # original created self.cur *after* calling initialsetup(self.cur),
        # which both referenced a missing attribute and passed an argument
        # the method did not accept.
        self.cur = self.db.cursor()

        if inmemory or not os.path.isfile(self.settings[util.SETTINGS_KEYS[1]]):
            self.db.create_tables([
                Settings,
                HContext,
                Hotkeys,
                HotkeysIndex, ]
            )
            self.initialsetup()

        # Per-call debug flags are passed into the query methods explicitly.
        self.isdebug = None
        self.contexttime = 0
        self.hotkeystime = 0

    # ----------------------------------------------------------- Retrieve
    # SECTION Retrieve ---------------------------------------------------
    # -------------------------------------- getchangeindex
    # NOTE getchangeindex ---------------------------------
    def getchangeindex(self):
        """Return the stored hotkey change-index rows (list of tuples)."""
        try:
            self.cur.execute("SELECT indexvalue FROM settings")
            result = self.cur.fetchall()
            return result
        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not get Searcher changeindex: " + str(e)), severity=hou.severityType.Error)

    # ------------------------------------------- getlastusedhk
    # NOTE getlastusedhk --------------------------------------
    def getlastusedhk(self):
        """Remove the previously assigned temporary hotkey, if one is recorded.

        The last-used key is stored in settings as "<symbol> <key>"; on
        success the stored value is cleared and the change index refreshed.
        """
        try:
            # BUG FIX: `currentidx` was referenced but never defined here.
            currentidx = hou.hotkeys.changeIndex()
            lastkey = self.settings[util.SETTINGS_KEYS[11]]
            if str(lastkey) != "":
                lasthk = str(lastkey).split(' ')
                hkcheck = hou.hotkeys.assignments(str(lasthk[0]))

                # BUG FIX: `len(...) is 0` relied on small-int identity
                # (SyntaxWarning on 3.8+); use equality.
                if len(hkcheck) == 0:
                    self.settings[util.SETTINGS_KEYS[11]] = ""
                    # BUG FIX: `settingdata` was an undefined name (NameError).
                    # Persist the live settings object instead.
                    # TODO(review): confirm savesettings() accepts this object.
                    settings_data.savesettings(self.settings)
                    return

                rmresult = hou.hotkeys.removeAssignment(
                    str(lasthk[0]).strip(), str(lasthk[1]).strip())
                if rmresult:
                    hkcheck = hou.hotkeys.assignments(str(lasthk[0]))
                    hou.hotkeys.saveOverrides()
                    if len(hkcheck) == 0:
                        self.settings[util.SETTINGS_KEYS[11]] = ""
                        settings_data.savesettings(self.settings)
                        self.updatechangeindex(int(currentidx))
                    else:
                        # Targeted removal failed to stick; clear everything
                        # on the symbol as a fallback.
                        hou.hotkeys.clearAssignments(str(lasthk[0]))
                        hou.hotkeys.saveOverrides()
                        hkcheck = hou.hotkeys.assignments(str(lasthk[0]))
                        if len(hkcheck) == 0:
                            self.settings[util.SETTINGS_KEYS[11]] = ""
                            settings_data.savesettings(self.settings)
                            self.updatechangeindex(int(currentidx))
                        else:
                            if hou.isUIAvailable():
                                hou.ui.setStatusMessage(("Could not clear last assigned temp hotkey on last attempt:"), severity=hou.severityType.Warning)
                            else:
                                print("Could not clear last assigned temp hotkey on last attempt:")
                else:
                    if hou.isUIAvailable():
                        hou.ui.setStatusMessage(("Could not clear last assigned temp hotkey:"), severity=hou.severityType.Warning)
                    else:
                        print("Could not clear last assigned temp hotkey:")

        except(AttributeError, TypeError) as e:
            if hou.isUIAvailable():
                hou.ui.setStatusMessage(("Could not query last assigned temp hotkey:" + str(e)), severity=hou.severityType.Warning)
            else:
                print("Could not query last assigned temp hotkey: " + str(e))

    # -------------------------------------------- getdefhotkey
    # NOTE getdefhotkey ---------------------------------------
    def getdefhotkey(self):
        """Return the stored default-hotkey rows (list of tuples)."""
        try:
            self.cur.execute("SELECT defaulthotkey FROM settings")
            result = self.cur.fetchall()
            return result
        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not get Searcher default hotkey: " + str(e)), severity=hou.severityType.Error)

    # -------------------------------------------- gethcontexts
    # NOTE gethcontexts ---------------------------------------
    def gethcontexts(self):
        """Return every row of the hcontext table."""
        try:
            self.cur.execute("SELECT * FROM hcontext")
            result = self.cur.fetchall()
            return result
        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not get Searcher hcontext: " + str(e)), severity=hou.severityType.Error)

    # ------------------------------------------- gethcontextod
    # NOTE gethcontextod --------------------------------------
    def gethcontextod(self, inputlist):
        """Return ((title, description, context) tuples, elapsed ms) for the given context symbols."""
        try:
            time1 = ptime.time()
            result = []
            query = (HContext
                     .select()
                     .where(HContext.context.in_(inputlist))).execute()
            for hctx in query:
                result.append((hctx.title, hctx.description, hctx.context))
            uniqueresult = py_unique(result)
            time2 = ptime.time()
            self.contexttime = ((time2 - time1) * 1000.0)
            return uniqueresult, self.contexttime
        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not update Searcher context database: " + str(e)), severity=hou.severityType.Error)

    # ---------------------------------------- ctxfilterresults
    # NOTE ctxfilterresults -----------------------------------
    def ctxfilterresults(self, inputTerm):
        """Return hotkey rows restricted to the given context symbol(s)."""
        try:
            result = []
            query = (Hotkeys
                     .select()
                     .where(Hotkeys.context.in_(inputTerm))).execute()
            for hctx in query:
                result.append((hctx.label, hctx.description, hctx.assignments, hctx.hotkey_symbol, hctx.context))
            uniqueresult = py_unique(result)
            return uniqueresult
        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not get Searcher context results: " + str(e)), severity=hou.severityType.Error)

    # ------------------------------------------- searchresults
    # NOTE searchresults --------------------------------------
    def searchresults(self, inputTerm, debug, limit=0):
        """Full-text search over the FTS5 hotkey index.

        Returns (unique rows, elapsed ms); rows are
        (label, description, assignments, hotkey_symbol, context).
        """
        self.isdebug = debug
        try:
            time1 = ptime.time()
            # SECURITY/BUG FIX: the query was built by string concatenation,
            # so quotes or SQL metacharacters in the search term broke the
            # statement (and allowed injection). Use qmark placeholders.
            self.cur.execute(
                "SELECT label, description, assignments, hotkey_symbol, context "
                "FROM hotkeysindex WHERE hotkeysindex MATCH ? "
                "ORDER BY rank LIMIT ?",
                (str(inputTerm), int(limit)),
            )
            result = self.cur.fetchall()
            uniqueresult = py_unique(result)

            time2 = ptime.time()
            self.hotkeystime = ((time2 - time1) * 1000.0)

            return uniqueresult, self.hotkeystime
        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not get Searcher results: " + str(e)), severity=hou.severityType.Error)
    # !SECTION

    # ------------------------------------------------------------ Updates
    # SECTION Updates ----------------------------------------------------
    # --------------------------------------- updatechangeindex
    # NOTE updatechangeindex ----------------------------------
    def updatechangeindex(self, indexval, new=False):
        """Persist the hotkey change index; with new=True, seed the settings row.

        Seeding also picks the first candidate in util.HOTKEYLIST that does
        not conflict with an existing Houdini binding as the default hotkey.
        """
        try:
            if new is True:
                defaultkey = ""
                for i in range(len(util.HOTKEYLIST)):
                    result = hou.hotkeys.findConflicts("h", util.HOTKEYLIST[i])
                    if not result:
                        defaultkey = util.HOTKEYLIST[i]

                Settings.insert(indexvalue=indexval,
                                defaulthotkey=defaultkey, searchdescription=0, searchprefix=0, searchcurrentcontext=0, lastused="", id=1).execute()
            else:
                Settings.update(indexvalue=indexval).where(
                    Settings.id == 1).execute()
        except(AttributeError, TypeError) as e:
            if hou.isUIAvailable():
                hou.ui.setStatusMessage(
                    ("Could not update Searcher context database: " + str(e)),
                    severity=hou.severityType.Warning
                )
            else:
                print("Could not update Searcher context database: " + str(e))

    # --------------------------------------------- updatetmphk
    # NOTE updatetmphk ----------------------------------------
    def updatetmphk(self, tmpkey):
        """Store ``tmpkey`` as the current default/temporary hotkey."""
        try:
            # BUG FIX: `.where(id == 1)` compared the *builtin* id function to
            # 1 (always False), so the row was never updated.
            _ = Settings.update(
                defaulthotkey=tmpkey).where(Settings.id == 1).execute()
            return
        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not update Searcher temp hotkey: " + str(e)), severity=hou.severityType.Error)

    # ------------------------------------------- updatelastkey
    # NOTE updatelastkey --------------------------------------
    def updatelastkey(self, lastkey):
        """Store ``lastkey`` as the most recently used hotkey."""
        try:
            # BUG FIX: same builtin-id comparison as updatetmphk.
            _ = Settings.update(lastused=lastkey).where(Settings.id == 1).execute()
            return
        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not update Searcher temp hotkey: " + str(e)), severity=hou.severityType.Error)

    # ------------------------------------------- updatecontext
    # NOTE updatecontext --------------------------------------
    def updatecontext(self, debug):
        """Rebuild the hcontext/hotkeys/FTS tables from Houdini's hotkey tree.

        Returns the elapsed milliseconds when timer debugging is enabled.
        """
        self.isdebug = debug
        try:
            time1 = ptime.time()
            self.cleardatabase()
            ctxdata, hkeydata = getdata()
            with db.atomic():
                for data_dict in ctxdata:
                    HContext.replace_many(data_dict).execute()
            with db.atomic():
                for idx in hkeydata:
                    Hotkeys.replace_many(idx).execute()
                    HotkeysIndex.replace_many(idx).execute()
            time2 = ptime.time()
            if self.isdebug and self.isdebug.level in {"TIMER", "ALL"}:
                res = ((time2 - time1) * 1000.0)
                if hou.isUIAvailable():
                    hou.ui.setStatusMessage(
                        ('DB update took %0.4f ms' % res), severity=hou.severityType.Message)
                else:
                    print('DB update took %0.4f ms' % res)
                return res

        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not update Searcher context database: " + str(e)), severity=hou.severityType.Error)
    # !SECTION

    # ------------------------------------------- cleardatabase
    # NOTE cleardatabase --------------------------------------
    def cleardatabase(self):
        """Delete all rows from the hotkeys, hcontext and FTS index tables."""
        try:
            delhk = "DELETE FROM hotkeys"
            delctx = "DELETE FROM hcontext"
            delhkindex = "DELETE FROM hotkeysindex"
            # delhcindex = "DELETE FROM hcontextindex"
            self.cur.execute(delhk)
            self.cur.execute(delctx)
            self.cur.execute(delhkindex)
            result = self.cur.fetchall()

            return result
        except(AttributeError, TypeError) as e:
            hou.ui.setStatusMessage(("Could not update Searcher temp hotkey: " + str(e)), severity=hou.severityType.Error)

    # -------------------------------------------- initialsetup
    # NOTE initialsetup ---------------------------------------
    def initialsetup(self, cur=None):
        """Seed the settings row on first run, or resync when hotkeys changed.

        ``cur`` is accepted (and ignored) for backward compatibility with
        callers that passed a cursor.
        """
        currentidx = hou.hotkeys.changeIndex()
        chindex = self.getchangeindex()

        # Treat both an empty result and a failed query (None) as first run.
        if not chindex:
            chindex = int(currentidx)
            self.updatechangeindex(chindex, True)
            self.updatecontext(self.isdebug)
            if hou.isUIAvailable():
                hou.ui.setStatusMessage(
                    "Searcher database created", severity=hou.severityType.Message)
            else:
                print("Searcher database created")
        else:
            chindex = int(chindex[0][0])

        if int(currentidx) != chindex:
            self.getlastusedhk()
            # BUG FIX: updatecontext() requires the debug argument.
            self.updatecontext(self.isdebug)
            self.updatechangeindex(int(currentidx))

            if hou.isUIAvailable():
                hou.ui.setStatusMessage(
                    "Searcher database created and populated", severity=hou.severityType.Message)
    # !SECTION
# !SECTION
getdefaulthotkey(self): + index = self.db.getdefhotkey() + return index + + # ----------------------------------------- gethcontext + # NOTE gethcontext ------------------------------------ + # @staticmethod + def gethcontext(self): + results = self.db.gethcontexts() + return results + + # --------------------------------------- gethcontextod + # NOTE gethcontextod ---------------------------------- + def gethcontextod(self, inputtext): + results, timer = self.db.gethcontextod(inputtext) + return results, timer + + # ------------------------------------------- searchctx + # NOTE searchctx -------------------------------------- + def searchctx(self, txt): + results = self.db.ctxfilterresults(txt) + return results + + # ------------------------------------------ searchtext + # NOTE searchtext ------------------------------------- + def searchtext(self, txt, debug, limit=0): + self.isdebug = debug + results, timer = self.db.searchresults(txt, self.isdebug, limit) + return results, timer + + # !SECTION Retrieve + + # ------------------------------------------------------------- Update + # SECTION Update ----------------------------------------------------- + # ----------------------------------- updatechangeindex + # NOTE updatechangeindex ------------------------------ + def updatechangeindex(self, indexval, new=False): + self.db.updatechangeindex(indexval, new) + return + + # ------------------------------------- updatedataasync + # NOTE updatedataasync -------------------------------- + def updatedataasync(self, debug): + self.isdebug = debug + thread = threading.Thread(target=worker) + thread.daemon = True + thread.start() + + # ------------------------------------------ updatedata + # NOTE updatedata ------------------------------------- + def updatedata(self): + self.db.updatecontext(self.isdebug) + return + + # ------------------------------------- updatetmphotkey + # NOTE updatetmphotkey -------------------------------- + def updatetmphotkey(self, tmpkey): + 
self.db.updatetmphk(tmpkey) + return + + # ---------------------------------------- updatelasthk + # NOTE updatelasthk ----------------------------------- + def updatelasthk(self, lastkey): + self.db.updatelastkey(lastkey) + return + + # !SECTION Update + + # --------------------------------------------- cleardb + # NOTE cleardb ---------------------------------------- + def cleardb(self): + results = self.db.cleardatabase() + return results \ No newline at end of file diff --git a/python3.9libs/searcher/debugutils.py b/python3.9libs/searcher/debugutils.py new file mode 100644 index 0000000..a5bab8a --- /dev/null +++ b/python3.9libs/searcher/debugutils.py @@ -0,0 +1,48 @@ +from __future__ import print_function +from __future__ import division + +from builtins import range +from past.utils import old_div +from hutil.Qt import QtCore +from hutil.Qt import QtGui +from hutil.Qt import QtWidgets + + +def dumpWidgetLayout(widget, prefix=''): + """ Debug utility to print out tree of widgets with relevant layout + properties + """ + text = "" + if not isinstance(widget, QtWidgets.QWidget) \ + and not isinstance(widget, QtWidgets.QDialog): + return + + text += str((prefix, "* name:", str(widget.objectName()))) + text += str((prefix, " visible:", str(widget.isVisible()))) + text += str((prefix, " minimumSize:", str(widget.minimumSize()))) + text += str((prefix, " minimumSizeHint:", str(widget.minimumSizeHint()))) + text += str((prefix, " sizeHint:", str(widget.sizeHint()))) + text += str((prefix, " contentsMargins:", str(widget.contentsMargins()))) + text += str((prefix, " sizePolicy:", str(widget.sizePolicy()))) + if widget.layout(): + layout = widget.layout() + text += str((prefix, " layout.minimumSize:", str(widget.layout().minimumSize()))) + text += str((prefix, " layout.sizeHint:", str(widget.layout().sizeHint()))) + text += str((prefix, " layout.contentsMargins:", str(widget.layout().contentsMargins()))) + for i in range(0, layout.count()): + item = layout.itemAt(i) + 
dir_flag = item.expandingDirections() + if (dir_flag & QtCore.Qt.Orientation.Horizontal): + text += str((prefix, " -> ", i, ": expand HORIZ")) + elif (dir_flag & QtCore.Qt.Orientation.Vertical): + text += str((prefix, " -> ", i, ": expand VERTICAL")) + else: + text += str((prefix, " -> ", i, ": NO expand")) + text += str((prefix, " -> ", i, ": sizeHint", str(item.sizeHint()))) + if item.widget(): + text += str((prefix, " -> ", i, ": widget.sizeHint", str(item.widget().sizeHint()))) + text += str((prefix, " numChildren:", len(widget.children()))) + for child in widget.children(): + dumpWidgetLayout(child, prefix + ' ') + + return text diff --git a/python3.9libs/searcher/enum.py b/python3.9libs/searcher/enum.py new file mode 100644 index 0000000..83d8868 --- /dev/null +++ b/python3.9libs/searcher/enum.py @@ -0,0 +1,38 @@ +# -------------------------------------------- +# http://code.activestate.com/recipes/413486/ +# -------------------------------------------- +def Enum(*names): + ##assert names, "Empty enums are not supported" # <- Don't like empty enums? Uncomment! 
+ + class EnumClass(object): + __slots__ = names + def __iter__(self): return iter(constants) + def __len__(self): return len(constants) + def __getitem__(self, i): return constants[i] + def __repr__(self): return 'Enum' + str(names) + def __str__(self): return 'enum ' + str(constants) + + class EnumValue(object): + __slots__ = ('__value') + def __init__(self, value): self.__value = value + Value = property(lambda self: self.__value) + EnumType = property(lambda self: EnumType) + def __hash__(self): return hash(self.__value) + def __cmp__(self, other): + # C fans might want to remove the following assertion + # to make all enums comparable by ordinal value {;)) + assert self.EnumType is other.EnumType, "Only values from the same enum are comparable" + return cmp(self.__value, other.__value) + def __invert__(self): return constants[maximum - self.__value] + def __nonzero__(self): return bool(self.__value) + def __repr__(self): return str(names[self.__value]) + + maximum = len(names) - 1 + constants = [None] * len(names) + for i, each in enumerate(names): + val = EnumValue(i) + setattr(EnumClass, each, val) + constants[i] = val + constants = tuple(constants) + EnumType = EnumClass() + return EnumType \ No newline at end of file diff --git a/python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Bold-Italic.ttf b/python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Bold-Italic.ttf new file mode 100644 index 0000000000000000000000000000000000000000..87b9bf8c920946ed2284d2935014f7874c0e69f4 GIT binary patch literal 148512 zcmeFad7NBDxj$TWPVd`H_w4)9v+pxK`y`W2GMP-4NoKN7GRb6;kOdM5N!XJRL)aI& z$f6h(5xIy65dqQ5Pmv3ViVzVIBOoAdT;wVuf{@Pd`&6CkKE2F@_w#xGc|V^wFn#)T zJyp;C)KgE@>1LcU7Ql;zl@2UlFduiEX#=^qI_3||8)RwhOnlUU`^@cocb%Yu7p4>LMcQ-yyNBcfp9ou$ZwDrVAuD>#N@Xw52 zad7(?lUvfhzP6od*6nET+>Q^pX5EPUJ8)mIeb>GVfBo&MTHHU$nDgSD=WL$z&iqzC z)6&9>S#7%}FWhapG<`MGGKTQ<VOW0C2 z!baILww$eCE7>Zxnyq1DY%N>I#@TwdflaWDY?5tao7on22HVQEL2f%>Q)jWAY!^G5 
zox^ssbJ-rYm+fQcvGdsl>_T=CyO>?V_Ok=*AiIzNnY57`%)~_wr)@s|e^R*+|P1>E>gW40?aqTtjU7hJZy;yJ1XXuOd zas6!lpnj8nhkn2Qg#Lp5rv9GAV#%~rSlTTEmKByQmWwUdSZ=r6V|m2#jOAs^+m?S@ zUDkYSjkVi4WF5EewC=ZFXFY1Y*ZQdS8S6{dKUzPq*=*Ui8e6w*$hO9|)pow^3ft#x zciSGZJ!yN<_NMJ`wus$lFSa+>d+kH^HTLcH3+>m~Z@1rT|GxdF_7nC$+dp);9QlqK zN4I0hvBt68alYe-<7UTQjz=6%J6>|U<#^A@oIYovv%%Tx9CEI4ZguW+9&z5}yu*3S z`2**(&J)f*J3nw)T$!$NSBtCPwZt{<+Tz;j+UMHuI^w#{b+hZJ>n_)Qu7_NYxt?@A z>w3}kit8=cJFfR#pSUeAVMS@(bc8vpXVXZW1c5H&w5_;yyAJw^N#0z&nI4s*W=Cf7J4hZ4c>O| z4DWzRpC09`wHM^T|(qb-r=m4Zg>GZ>QzT5z^YzR;68- zcB^rBciJ;)Z>KxtU3zYMZ~A!p#m3z=>G!2Smu_HA|HNPH5BoRBHvifF8~qRZkBd92 z|Mh??&>mPDI3(`_w+0>!oCti9kuUEvYBH8(?9aF};~CjIK9TuOR(95mtWDzHmUUs)omtOjeUM$4Jx9W4ugbnK`}XWdC4BZX+3)2P=FG|2 zE?aXB=G>9v6#l0=> z-Tc!0W%-Bl@6UflLgar~P+qXOU{As4#l5}Y{(|EL?-gbib{4KFJYRffDLhnoXW^5D zZyI;+6=fE67Ojx*MO%ulD7vTUr$z4+`-&UHXV&7*;*G`E6dx;o$+&-~B)cSBvbE$o zd0%p8$+IONlopmsj%}rDO0Ow>tn}@&+_FIlQMRermB;Edqb;ElmYf(BQ?_bLi21}k<}+$7s8?y2}`#XFV0N=c!kvbS=) z@?hm1l}}W@Rb{KHkWf_(RjaBFRoz|nta1NlwWYeMdZ>D1R(Q3(+rTULG zj+(lfA<<^5SyOXy&C!}4)Vy93k-ckOwe7VVYWLUPDIsbft$nl3Q`cEHUUyhR)ZJS5 zc-`Ce>GhrRzJ782`So|yKVAQhyl>DNDjEhGwl`cM?kx>BH$2kta>EC*wb9nt(zv$q zP~+Xk{bP-PY)WtHY#JB$_NF~eH#a@f^m5a`n+uzJB~0@`^Va68n(u9vmTGN&sfD%F zwT!f!FWXzLYk8#Q^;TP}l#{h}PV1J|D_ZYqeOk0zT3>Aape?se(ralOY}?*;P1`-P zZ`-47C)z#<6^CX>*wDhz&d?2^`($hAvCtpe)7v}SC3V*JJ?%HQKhpkkyTMI|rK721 zO~=8GyCi(a_dDL|^mVp(%2;6Q+}(L^=PO;VuI?_W4Qtoou7|o_>$Y^)$sXO|?mgW{ zyPxR(vvD8kY3W(pbErqk$I|my&mVgoy;ZWcx3hOm@5Q}Gd%thoKhyhmpQo>>Z?U+y z^{wsO-}mLd$NS!#fkca9(+tmyrWqqM&X({q4$in|#tSn(oLMYeXSUB=HS@xmx60O; zch7ug=G*QW);rrpS5w;LD_TG4YQ8TdSTZ4 z;efmkmxdRH_l0i{KQ5ueFNHsxojk{&h^agoI5_(u)evUpZmnzcLo9jaxB}xl7T}54-ULM&o-}J_Lw&^@8WrP z&HL%RzsdVS+hEJ!+QCDEQli$u#|Hm6KYf1Zd_$-6_sl;!|LOVf3>mZ!)eJ2g+COyX zkhC_-&}$3Wg7O72E?O6iEZDQ)<^_)|cu|aQTkz&W%fhOKLkrJbc$LJr@aBa-Sor5f zzD1HA%c6ygb}qVMkzrAb9$WOs#p#PX7aQ0Y?^%3w@zaan88)yF*9@-HuceEDm}-McH&SG27dk#{RLt~k8nt`$!hcQ34Xe`WScLnA8(R-V1` 
z^DBR_@-5lA^53fpSM{%w60oe=x$1^h$5u%it*c&J?O5Hmdd+Hs-_e1HV|R=_G4|G4P25@62G-74yLIh# zYaf)YYoA&B_Bzix!_wFFt{Y!>aNSYaw(h=l&#n90xKG}V7mtU>H;o@M?rt1^aQwyb z_vGDrZGFZ1!S!3jopt@b^|!8nbp46-AIjDZwhc8K7H*K#S~gs`;r0!WZ8%}Hy*=TX zXqs3odrpi`TrqKM;-!gCWb4Mj#`cY?HtrLv9=`F2alOj8UT0iyG_E%r*UuZ*qsH|? zXS2c2 z=7q-R249=E825XO>mlQ6*yZLMjQgX;^={*8(7oBv%Vt9_o1Zn>Uo)<68rQ!W*MA$= zh`erb7*~&REjF(0#?_#Aiy`+dhTOO8Gup2)u6Gz$LylV>H}0P^uCE){cjfgAW?X&7 zwcEH3qDsI~58$W+aIF6cj$?Ihe$~aNsIdTMDhQ5x0!JM|;<=Xk^N);CWPjOC)<`}d z!$J6En}*s$MM?@Jyc$Obp3^~UsBPq9j|f%$V3+h6gS|AvKwMhQ5i_AnczMj zqeY>j?TR)vmJX;YR7z4wGM;aO$V82X#{G0XbfJNA^s6!W`fQBiB<`fXI4U^=Pn6%7 z2p#2uv>+`&1AG#66i#tVv}eP13ge*@;1qOf*(@h&Pz9pb6{gqC7&oX9s) zByJ7O6ZEH%jiia7rm#O%Vx5GsIH%-gGXpLHZD*2nWOV&4){x=K$rF}~Nr2NhJRe#w| z`v08hHJ$y<7ZB<-6@Jx5I-H**e}Td9HEJcQV)$=P7+tlQqksJP3X2>kKKiGIOpN}w z5YD*xD5=iG=W>=H$=n))0#|0N=IAe=qU{PQ*^AT&1hH7iDACRq8#ok1vYzD_5>Ad` z*0Y&jYK@n(nND-xspK5PGPRr)jG{PE&bP%RXy{*Jj+gW2V^ENc8jG-+)_6tY`E1>V4|ypjzX{Ow{w&VicL^O4RdRF(?y#Bz3c%1yrQigjEE)Np zXeNsZ;A&p+n;~P;OVse)DR?8y@ftQk8MG>_W(^A{qdi7jay*KsES0yb-b%{EUwpP; zhEUcvHH2DUO%0J|iVK8|vHy%o2G*x!tk#F3)g+7Qxunsj;vtRj;Iv6h-^ut@|5!{j zeH!EP8`K)L=$M4eGl#+*4=Kl)+9Or3dG8b@KBiVKDJ^jt{mQsbqL zjtXWfF7As-c^WQEP{ur^u$s9LP|=8?Fsev9_58PjXoyz+WE@GZhc1ePitmv;|CuO` zo7X5#CeMH0N|J)2iL{W7ze4m<^-vRZlxj(jq&e*Gb#l|pG`0Z&a6s?9Y(delx!AxUp4owwz zC8UBOdPri4mSCc28ef0VM7J6-*%Aa)G!`g~r{hA=sq__Z&#C!M(HF7*1a-4X3 zOAVQ5Z{LkkWTGoEem)w5GI38>%`>EcindQJBQs^wX+lc-^bpDM(@d}GFJ(z912dh} znyf`Lg(`PWWh+zV-e&x&e`@@xb8j<#wYQN9|7OyQ*cVi>iF9IcXy{Y*PmN!-#*Z%N zHYtA@zo(l6m4v4r~Aydty{}toZB>zNy zAB#avv%evrqIy!aosM5Q-wJkHZvCrZ_GZ-T3ba@ z!P6e}Dj`=*!9_fb?3J9=nCLUOpz)^DMLbqB7yn046_1z3O7v}6$C9sPNy!k>f?^P) zrFidGZN!%x!|Y3f5|eF7EvtM$(y2yBg7bd7EmC0N@L1P(_Md3|l3uYe3cayDE&ZZJBHH7q;7zF7t z-kwyOq$!ofnIR+=v&9Q;44J416PwMy(#{X#{k}3RelDA1~(w3D^JV~vl zIDa~8il-xS#!{{EV7} z{5VG6G+daV@D!mME0K$5j23E-K_y$fl<#y9vMmOYZ1Jj1(V}EUkc|C=C^7LvpT_K2 zv`$N*99_ODe~9-u)kgS}tyN$!@T>l*@you6bDSA}>iq`^p)$kRERf8<>Yp0F%xtH_ 
zuT}`D_*ZnM@_5yjT0=iIX-Hv+QTQ`zH8hlZ4MJyhIi6>guc`2>HU*(9KuSnpFf_C< zX^wq1VRY5kJja^(Q|~1miSb0W#`Bcui2|pj$e>Gdo(WoFwZfkhWzlvyqQq*BCju%e zKLwTSiDt^w-g_!NFA?%MJw&o6nCVshV|q5zNv)@ov!o;DQSmD($&G>`t_dy$R2mQ$MwCkU5F@RaFI)loI#Q&9LV8B>m()1QX?wKa;MK_YjhxO{*ov zM{Ezl1RbrnD{CPhd$SmnJvJMgNh>Q3b9uF6bvINQ4aq4OEE-3|- zS&LSaB_+=x=FydQQsIxcnyDeAUrBD0DO4C@bBLKjsrSTe#!R7-cq;tT5(xiPGK|@Z zit))5Dh#P9l)9Xb!qod+W*$=4Jmq5!di*LckJqBa zudDKuvi(vBpBf(Ap+3Pa4!%oL{1{LTC;UrR;dRK9A)ullFPpW0W=_-j+f3JFmw zWK9t9c5cS6`lrS(`zHF?CNZvhO(lQTMi#PZTKv=+?`OYG7(LlS%%dy$#miqp#Nq! zc5cS6`lrS(qiUj^uM!eeuc_=@wULD6zJnZJ#u+pI*e-|4YfdDLu4HGnA|+WGU+$d8 zmk^jU`%^6icmZk5>mks zJV7RMAuz{rA!|tST$rE|u$sAeMQ{<1_w-yyuBYch!A!-)tAzQqT$rGY>a47=(rlS zqB(xOoGcCV*vYZl%vlUl&Pwt2X@-ogcM@grJJLpy43sw130D)8!IR>~tXTn-#PjK5 zMU1woxlk}uY4Y_?#l;&jDAC28>f+59eNrRB zY7=!~f=a+@=HmATfAO@PjtfO+>e)Y;Uy8_yTnNl@7L_V)o1hF`sM%sFE@HHOIxb?f zXuMyTxroiir{_Y>yYX>5nG1z=dM;AW;qjEEn!_bTjA|tbvSG&FirEil5v4;lhl^H2 z)~WHEqwNhw|M(nALfjy1>+}#5Bc`TM^^aval0t=p`X**M3Nu+X^-qRSy%hvWCZ31H zF{Xz|=HU+*(TJFW2I?Q<;g8fRzm>zFFgb?D%bDO+G)-K2o4HZek?e5-XPn1Ls^e|d z1ZB{tu$paE(jSjEm7J9g67_19kC(HAP`oBu!kW(x&3$mV{I=YRp6~ z-j2~X4Hy3tgPMj50cFfF3U5r0W?iUOqDo3TRuhZoLP9DSf+xsCE(GQ{iZ@D!ixXLP~pjE)>jET)Y#LvZ0GqT$rE?Su3n&E(Dapg~FT4FXgIAz7C~~ z;i`?OCJ1GfWOIy}sZsr7ekrL?Bcz(4hKOflB__YIA~A+1L&)(Z{=ZUxLt5&)31av~ zE2x$@VzirWafN7=uToMf&Lzf$co-U!AQNrTM3F&Ps_1HhnkFlg^vB~pJr~kyr{_Y! 
zOvS~!F}Y5|g$c^=D23H*ivlXT!=x}OJ2LA+db@m8^RCKEQpE)YLu1DBgVRB#(wd1A zgI}31WMbr4HTr*JQZdchWQk^e1(e}?io#UBCv_lS74@m+#Ml^wX=(^*XJ+sCTO7Sg zk}AU!t%lwtx3aHUBMP^au+kmTk?0*}$fQJ{IYvTElGYhPOf7%aKQ(^YHo^dJ?rLaGq9+CMe^(l@#@6ElT>O1{B`YX;FQnS1K*a zG30kWnYH*YlNMF~m=r~;p+(s@QHyF^C6i<=D(V$PycW&)l^kODKNMO~Bb;7~a(xjS zCzUTM7)4K_76s1JYS9E`tXULRvlb=&Qi}@j>9nZUE~&IA$B?Tdvlc%xX;Jl$Nl~;K zT9kbgwW!8bGD+5=qFzD7Ytf8f$svaS-$ILOgjD>G3JCeS+?WyK`B!a(pCID#3k*gq zQvDP0s}W9zf8{jzR|+}_Vru-Ve`@@)Z=(Fo{M)C&Zx{FpVru-Ve`@@)?{xT=OoM-k zz)uiU<5&Gt!>{&`Q|W(|z<+v(=_RE4r>2lvlXD)khSalSsVE#46q=tGBVS_7 zY@w=uYW%YAbP`%HjfNHo`~)$zhE)I5_+{Ve@T<%>75`JMT+IBd{;Bbsvu<yE)Bo4rgQw3mY_<VM^fYdN+u0WfOKPd3i>|o<+rWBC;BaM8AAjzt1TvVv>UX0GpB6{$-jx35Gj6V3? zH0nd2#kivk!p6UYSNUC_HhlNGd@m`zB|4V;4L5>*NI*~qY2*K6yjOgEq&4Fc9p95T z4`%}bVSL9koz$`M;{sZ|H)>t@%{?KfkJv74htYEYvWkvpa6+FAOHSBsLrxOT$1GYs z+oRQqPc2vr4&X@xl9!D?BJj!g_nY3U99&>wd!sz2p&!YM#@tSyY8raUF-d=hBn|xZ z31N9ajGU@fY0Cl#4Pz4wtb(0`kjuooI8TpU|i>Q=?W`yVgZg!Fam?W5rn1 zZw35c@^xNRlJpyYq>A`h2IoZ@*%DW>I7ctDKMqS4&2cg3*{$Tg+Z761e(RNb((D3wfRvgGpE?cMkkp8ZCBhmAyIb3%?GqzOOCyO&8}w-BYOqd>7i$4s2LH-# zZZ)VFKtH1TVZ6#cBb(+i-YX8;aj(I?6lEc;Rck~4OYpfBUMnfvWbk*$7eQ{?_1g<@Yz> zy^a4&jDLgVPFoyD6MYJr(2uellCOw4nlAcKG!v}`1*(;z0`2q~K1w5MG(%fNe`y6$ z&dWvHGPGGC%^}0#*r!@Z7ICz@xjQK>{rkxRmm!IY{mv$kj+^-$f z4rqu@+Ijf3%Zov?3%SvJ;Scm{rK6as=~qf6UzZph>wp;*pOQshAja14$!6q77Hv#> z4EnNY*T9+>TO_Z118d~-a3B45h-r4*P);TOEu2$Y%D##OOVfXIDxER?H>epgb{jQn zRoZb}N--`P&4o6)!T(AB@G~&@hZN#d{L>Mq>33SU;FnrA;Wu0D z_?^~4=%xp@R15i}`n^~6E3avg+hoFT%vw+-i~Y*1dy3zgT?rfB3_DuIZZdvxc8&PW zSs!LkCw_U>3+=Q(1`fo+G2>Tg*P4EZmVQ$<9X3kea(WN@2Ky#^nLWb(i~W$@!X9V0 zvmdc1*q!W2_TTI&_B4Bj{TR<{{}kW+|1A3%`#F1_J;z>PzhJ*)FY>GTTz(!upYLUd z_;!9aU&B}PbMfm5U*NCso$M7YhfQduS{c89@6ZBTmX@idYq|VFev#(qyV$FIEg$2% zwQRme%hNLOTeOGyHc<67b~k&NpUKa{@7k^tze`KMPrF{r$8QBc#*g4PTJf8;TDR82 zufgx$Uc;{CSFx|N``BIVTkKx;06WIM&F*L4VGm)f2if=7_wk)g7qc(oH+8OOpT+OH z{0I9oyP1839Ywpv+K<=mEDU+k@5Q(vy@TvNeks36E6}>MA8S9+UezDdAGdfcOD*G; z3Cn*JUtXG7nqOL6T3%XJ+EUtEI#9a1bYJO_()-FX%CgJK%c{zn%C&M^d0Kg(JgYpv 
zytur+e5ib5`5Bczt$Olf4Zl`Q{|FOV^8LV+3rtS~)5`)AxA-ihmJOC0iw~7%l;+|$ zC`$#V9s|>%6qx+Llo!RcMPTB<#M!!2UI28<8KvXk>wHk738FUZ_IuJ-uOoN z4euN9k>CCD4d)v>USIn9|GfU!*VAA3z7D^7-GR?P`rTiD_lw`{XY7@c-=2>_lpk&4 z#T?I}YWSUJ?Pd9g*VnZ7_4D=X^c(aW_1pEM`W^aRgjl}fdI#Ob|I=^9=lGNV^c(4_ zlg0jj+T&pDUwE@YSS-FywZp3h|y{1U#OeU9(o+hNyV<`?se;1QSe zEBKXsExh6`Soz)Lv+%8Nz|y}7oBtLpU0VDJc=3b6>K}$TZ(y&$>dE&1i~S$Io}Ufh z{vo{kNAT|_;Cpw%yPp*P{bP7I{F|Qx5C0i_{O9oR=kXiUufoS)ATMXXV!wswz0TfX zZ?fOx%-2|LLmd@|*{Q{}f<-^f49J>1KEJdLMwKgA3l;2C^`FXfp$i)Zs3 zp3C!iJ}+S3Wykph-^dI3R=%9C;6`Q!Yn`~m(W{%wAY zf1iJkKgu8D-{t?s@8rMb$N5kAGyH4(d47UF$$!p&%74s%#-HGK@u&Gc{OkNVem8%X z|0jQmzs!HZU*IqD-|%1ZU-4h_r}%&K_xT_B-}%G*5&i~$lmCss%m2au%-`nk@Za;d zcq4D(&Af%T@-`7`Db8LaBJQ<_?bqNp!Vzf^3lH(n@WcEvEsJf`inVNt=@fs3KbFD6 zyD;zSm~EV_HQeO1X`0R#Q=`W8ZAkpMh98I5_=JUYz7#(}Z?{?iuludGocd5knO<3z zQC+5&?csOriQLN@#@vw?$F$=oTiUeNzLU>mBt)1z;8_GbZdM(xz`X^=vt2{v=^eIN zHC^x1mjKSq+2rW+li+V&6k;q)KU<&8vV&RW zQBv%oejaKo*6g6Cvnp6_v+LQbFGzFxSI(&EuTHb~9O!g3G`EBuU0T&dTUSx%!p6w$ zd@R)67mD0V`N*kt$ZuNkq(D8}FR5$>9av7;4>7x4QE4KW*=+hG)AhN-4sNy1vMwnI zH=24FL{oX=$5_Pg~ns=Q{Y;wV#3GlJ1d~ zt6N8UN_$%MjDpg%uk*7SJ7=_C7V4Wp>-8&7U8c>_hmrf*zT>v=wt8;M&apS~S-hWj z*?rv3i~LJhW#4o-uQc!JJF+%j65x)l*JT~7`RV6=c*{>}4&+>QCb-mC)u|7J9KEcB zm4pjX7gK_O!3uHdOR6g?%Pr3Q`mT76qyoV-50*D+UEgqYw08`Gi;nj8LC5~|8+Ugu z>&$Xw`SE6XdzSrpTW?S3%C_F#wlnsX^o+D#xfPeuR?t8`lcwwNmmIRV3|L$#q;H*M zR_g#3NnD$NLCqZ2VR)7aXsFKu zKQjoq!(X*D?F6!w&63h=kfy_zb$!M#K}>+0KE9-2(Xm$W5o_&@YYk_j2UyoNO7T>u zv9YiRw_fJWC<|JhdG%bkW%qGAjH(Ot^Xu1Gi;M0Gd~3z1ckqFLcH-pACFSRCSi!%0 zE_!LK61totXbYNkIfpPSUDj5cb=hV3Ua()6&PWCfUEb?xZ*QOPXqJ})j^_F6yP(Z% zM~kCr{)VoOI(0F(VXO)>B;CGujq;F*Mp!QrZixlB{w-R3YdoV27ow z5Ggc^#w~}?3~41r3PWgSFo-P~j6#kvP_z+4>JuvTL68)43RnSRPL>LPb{qaniaS9) zXW#xJtHWX?|BMelyi+^zP)UhDJt+baf5qT$0{nT{#Shytck{)P&3v?pcr4i@(^Z5S z9GW4(VH`v_pLBV7bPRK&!Iy`5`~h=V?+WP|!RnyBc<+H(Yrc8?uRe3bwlzWRL}c%8 zB7cwk<;^!i2*#{M%nTuBmSH`#>ycs~oQFuJ2w%dib`Wi4JK%U6A)fPjBzb)T5p$Lf 
zcMHZKh#~<7Q;&lh3ukfG)KFDXke8e7PxrW38834b>{96m>N@1pwt`P`m<;i$cF?pVi7>7Q$vWzV$*)~<83h7d2g z&(hiwn(JsR?q1q5*0Qv_xG%KjTM(bIldXDU(3-} zVU=n7j$fmgX}5M&B3>d&S5-Ih>dG!HCo58#&D+xo-CmF8@w)T8k-um1s}DT4|A5Vo ziQLIG&0+n_ZW_;h>dRW5z8I^{ZHta&L5kJPiYeW?2iR;V15u4yvv9b$Mb|B3Bu+$B zi#}Qqu91-1q0@mPj8*4g4OLxAVJw5ZG!3C%1O=LJlpOo@S;$^!wyEcL%&VJI>vNVn zOPekTTrg%!vjn>q*3HWZRWw7i1+n*g7%7FNMD@SG^h6SwRL-!eW_-4Q|`X< ziZ2y5mS%9PHn00=StZERkVP<5{*GgE&0>||@-&WIU@;N`c(S$~5WR@ZBIt>5YsI9< z5pH3Q!HiIKRXe7IpiN9k{oh!4sqgur!+*acQ?pv&a1Kgq9)6hTo(h!v(tY^9+>Zf~ z4dnvoJmBLiDXYML^)_c=y-0zJxt-(# znPuxF7CQ_M@lL+cy4-8#@c9ECd*i(E&cE*Y!&zrNl`h6 z?lP-gU$dxXxU*0@zB#fj)bnx=-_{uV5!FWP5pM_J|3TIlMnzFYBwe%U2jSCf5=Ouf zeRW+-Wpk}duqT$Anc+`!Ic!!IBt zk(}@l3{}j#P&lmB9&DExv5{{DdHu(Qj;0|o2WMec#@o&ZYDnm;|P1+JP&Fj0Tka$z?@eB~8FOVMA67(Sz=|RfEi& z>_OyC<^J23e!O~OEv)^5Ww5qzGlM}PKpcu$n3XKdNF_O= zX~Ay8iYEgZGEEI+US-J5j5IQ2%YLVG$T2H&D!>np`fu73TH2YfAJ95C_qX$__eLx` zE-308Zf@uRp}Eb>|oM>>jc>tyY9EL~476 zbLsX%1Wyft6v4`}P&@vIyd19wvs}LW4_E;HGN&Psm0{DJR)0h0H^1qxNDl-uvI31c zkwziw4DhfVJd`l%q%! 
z->Aa|Jw(aiIeTtj51nz3rdh0m=RgJJX?2-7xy4Q0`TQ}E_F86rMs`+iK~7guWG=`M z@oY6{@nBA>3s)n4k<-Ub3}R03c{2mvEMHbt8HmcO4`E88cuUzoS)P~LLQXIbI90pD7h9+PAgx?AtzXED>o7#BNzWrWY^86oZG#i zcl^>}t7~~&R_>xa8~JxM_a4|(@W@gAN-QlWn#n&FLr!Us7`zkdi-j+?!9i*A-iFX+ z1Yb3Sr{`p)=lXN~{#+y+`7+@kg$X|iawW2B-|1S8nD1UacjFZ+CYuiMIxPJVh|UQ@ zY~3TR3%2D%p68{gEL?7g4?a*L)+fKkFP)#c=$KE$REw2cEqgF`B5^thYIKSd5T%+u z=Q0sf1sFdxE!?KVL+Y8TJSJT{FJ(!=9yJxg{xU&c@1D;nx{^vsec2*WHw&+N#)oiDn-OlWo zmuR(@(N%^x8_ZZ4&|HD1Hy+%mt>3%%B+h(Z zmjhAN&oLBnjTK`Et4CrRZ+!g2Gk2tq9vpQvwzY*FqX$O)*Shv?)lh|O@9pW}ry^|I zUZQsl^j<3PWQ7AD7J&)MKv?n@rRzi^d39E%UBn}d%!6Z>)`2TZ=iK&R9gh6`8;E*; z@dsr(7xZg6CtEOX6~?^^<2JJPaBCUlfTai45NrZbPi2tSjSjn}CZR^wNTCGr#EHb0 z3Rkc~EZ-;&N{-BTDk{n-SXH;FiraLH#f{+SvR;32CX8Lz-Bt^hc^7Pc?1pU3ij3T4 zL96{5L>r&KIVZocFh8f+&) z$cknM2&*tqi1`5Sqa%9bzi9MET3n@~HDdR>mtPrnBMgEttJk^z>VJOr$^|Y&Xp6;W z^}4RUhVSL;A1w5{91f==$NNy^uE=#iF37~K%aQAQM%mY8pe2Kqhf4xB1Z2Kgmm-Z$ zPZ=yDP(;&H0Zg4DvLQnh7P|Cbj##mBwdq!i*LC+5kyon^RNnxJ{K;SJ@pi+$2{*tg~n6=gqd>)3C04zh6KIDXGls*85nYPVeE-DyyEcEPUUAzOxwEjTkrlH>l zB7*_GYr%5??fAY(z_MpA|Eq=|AjW_G)Cc^lsMEVyMmU{5F*2))at#bX_mm$|s!;W7 z$IQmYdWRja4Qm=ZIvN`~J82A!1>q|dSmzca(tKC0Z);(K4#pjanAM>>tokA5a4=EH zoHy*mx=y!jLQoWirA5c85t>jM)Et_*gIM)aFW^Y(B=Y~$!*9fF(I*kwHo^So+LmBR zA1E!Ut}JONZ72_v1^t0a7YyFKT8`F$f-*wI*kAF!ICPv?H8v5Kh*1Cq4St!lb9Y;yAySOtn5!n>T-qO|B){RMSMtjrIrjEYQ zW$iPB{Bj__O3ZVGSlK=(InQFi0; z))}H4+tp0uH9%(du(=Yk7Q`A=*Q_u$3;gOJrZi#3LPBy?C_;8oVSX+agFcsomGBZZ zSy2U-a+IuK6U^L3DyASF^6^ttP_<|KZ#<`Yad*i9-rd+a;K)I}wPmjFK+z4Z64XJi zKVwf}XsEubxhr&iOHX6X_*RHlpV#o@=oTX`@t-D6=af^$c0YN#h@i@^3Z@hU+_#(jdEv zeAVU*b=LN$UE&K3j2zIN?VX+T9XjY9X}y;0vZQBe>z0b{{`}nXuIg+)>hGF4voiqt z`Kfo2dtsf6lHLKMmWYhOhM=oqlia9Ni2|&B66OC{c2$5aNd^=rNEaw?ybZ3_>bn5 zT0AcXUhGOcysf*fzM4<$UbJx4+4;GPc1OOt0UJTg1D^6B?KD=z)=92PVda)d3PmV< zgKr9{D7NXkMkR3Ml&E`C_J$nax+5jPSU4?^o|m0o6eudIq~dpeJ*U#QDivYwj8?zF z1K;>+fUjHXXqdaI?bg7JO<@P#IBz?Y^(R{`&hwNBUvPm%;%^Y?*XhIg1`*H<vqJCdpYDRQvo0WCSg-d!pZt{Cd&+FB<_i2 
zBQG#Yp?c%$^U|DwHM7<%OS2ueI9oz3a~z#wL+qT5btN4$X0~H94F60VwVLt9l>sF?O%kcZ1OoNiF;Xri4Ml;&k3&V#=n-;N_zO_jCHJyz%P;jyJ|??q?S zuHB3eYlF3QA**x2$oO)%@8FrPk@@*W6{Wo%r+fMQB`b#K>u%Gk?!EWi$UpN-&K+OEQ6wcj%m==;I3Ma~1>syfSD7JDC1=su(QK9n=U*}t zJ0l$Vuf^lM_ekV_KBrYhj%HT+{Ap=^Uu7oWa`FU?Ux)E`V*ExH3b*)?xS7*v+9E?L zK7AUUDRx1O6fh&(W{)NFSfjT?J6%RGq0YJ5j%tDCw%ctU%cgzE6FGNdW6oolnE2ZD z;7-$=h`kQ$86fsvcQeFagdEz(SIN6#WYkS*eAUSle2cRtGp7JHQR|C*39;jU!N(~0 zD5aW8uHPBWp@|10hc1fd(3G}LnMA*S!<9oQ#$ic;+Pcej?oDsrcIAizVaJYpr|r_4 zaJa#Fy~As@S@FNuaed^Ukv|+w^I-P3S$*~|3B9!-7Po<(BGweHM|g!l+YUkAHVe0% zsli=@v8e5k^2{=9qhntQJB#tlETbN;iBzqszjc3;qp_uNmaF-amA-=;?+JXdvDxDPeo_v4Jww(>tC2QN-5qf}^_TcM4PqtV$3X{d>Ai}8HVUf|A z>F-LXYCEc#Jj+fR#f*ehyUPab^*CJfa7N@CT$FeIn`yALaRDI@)!h$ ze>>0T17tLW^YT+0@l3;Qm=E*9*|bvSmjmZ4oXn$&yzbYXIrWtYx%{3-b{)Ab@)O>L z2DF&>9a|as873$M3$Zh6)JS6`z{+acgQVK7Ovt9#gOq6yf}u$EKh=)pt9nURR<^}! zDMrD}GVT84_i~Dgaz^L{d>2C2*oi=#uL)QB9Pn8<6@?1YyS|b3cI9yj#A|=CSgIpJ z7QkvpP|`61o<T zws5s}TTy6H=sZuJ%j3oxyv+aaK%^*(>+`8T(}K$CrH+;b8@ocI?KzsgdT?N%YL&x> zq=TIM+#~?4nb|}-R!PG-sqK4T)t93X`nPiJE3k|nsI6&S+cW7BxfqtN; zxpT3@(bK&;<7RJ;)8%x!ojKn3{rsDm4?5GZdHJYb-aFD7TG3Zp5E^Q%X`R#AIs1Z( z-DzM)826bMUTJrtY!4o&{!kAd+^h;yBQhup7d3i1l(QX72>Rhvw5R1FQHji{ zi-#Vv{vvPx?ETr1M~nH`4ZO7CYmv#%eh=p?IjewdsHR@QE`PWL!>T>t2IT&zWnzO5 z=Ai9%z>=`LYT1G0)HZ>YAMjySunA@+whbCh;7CRfP7gm8&cPI0R9KKt`>bhR4^|Kr zyn^y_%&5kahh|$OtMUK}w&KdLks?<9eAkBYbQ{FHp?k|7eyO#yp>u)5j=h|D6_*U~ z2mFy|sx6)@e-D3d&!*ZoZ0K(oEvf1c)rR_O`3-0PF3a!3#y-T!iotskcrU>j9NJTY zC_y`UJ-DOPNEAb$ zZ#Hd(w6C05_NSh?1Km9X1JKtv2pjDWcU{BhIQ@3Jou~Oc1ukv;tlu3z z`Jk4cmW!eeypCk4vjX8`Ct#mBh`95@b2`c)PQ)P{xf<8m4w`K(lMbv55FIed)8QEG zDCXsQJy?C#<<82RmErf~c(Jp<+}tf}fwC&Y>2%ZkYEc14FcF1MvGF9fk9AZ=c~jS7 zr=zR8YZy|shXx(?_O>=u#M>)6Mmw_Y*20GFQa*2<+m0a8)3d5O8+lvH%nDx8Tw&2fr0kUFmLN_Y6;J3ZR(yVXd7%O>8dY8MQ+2q<|SRl#a+YA!M?^~o5g~8 zprm22jfx4dpapmtLR?ff1@UuT+YvPr0gB*pEj$vlfreYC|a=B_m z`np=qOtkBvkD&NLLCe@LHF8%NWN_gF#TOMH3p`MKVbL*u_|Zqt`QG=4CR6}cYqPaY zSl~B=Yl{lAumZ#Qi)SJ)udT6hs6dfFNr*?wN{cm2{TW?3#d;2wf$i9bq7q9ESxpXA 
zD57+q^e;^U36F774`?qid|M(FS*Zk>%KsSL-$qI)MX52&#xLU)!it9 zI7>E{IlXSXX0vDH)f< zOY1_Rx|-HjexLCM_|8C{_AB6{$^lAzh#@con$FCKMcU@`U>~1SH>*tD3Pf$mD4_DI zzq{*5rK7*Tq0WKV`g)Pl@!tCO_WHVZ99$p4U4Cg6s^{ zYJ39k#tDF^xf$L9Qx?T@_zU^W5Pvif8R`+Q+_{(U*oU6qIQ22VA937^NG>ucSs{^| z*+S?D5*iDjyao&KYF^qEsf;UqpqfC*EE7lmjCx1o!XtSL*A!n`SyU3FLWeebN921P zi#pQtky9UQr2=O;J6rBx8L8VIIt_!#ZwG?1F}+g-P0>++lvt&B(*oiEmBvco64{v< z0e`xrTpXZ^qg)(sK@Cc5X^9m_vGz)SX{pslY?RupjeOj9@rJasiye-dgF~!es@A+IF$w*ZZBSaL?VwmDS z7TB@?Kb%Np}@10@+H9+$;$^?NAi#2!yZv{Fq8r@g$&w0_H( zrcGP!J^5qLQ~5aVYR$_pD#ayt?(A7EmvdEEUjDP79D8_ZE5n(Fyt%Xcoi5kP+47=w zjYhsR%I_*G>@O?}=Hunm{F(hThK6SJ&zygY=~MB6Xo9Wxo%#=L0b+Zt>D(SvjB|Uk z61)Sx&asuY4QKe7ZH%@lC-H{Kb9)+XRwLGmqVbfV>fBzHIJak|JOqLfzDi|tRXV2G zvKXg0sM5qAGS2N~j^Wte8i&sk>|P*_?KN7`jAMH4PUQoRa=9QYZdBbt;jt{uEuj0E+6v83PcfYaY2}F@Tfc8V3&}#01FXL zOeczs{V!VnigU%K;1^}B*ybKB97TQF01@?MdhCK`L~KA6@00r;;%=7st$O-7<5r*8r~bM+;`K)I+{ilShsO`cQ0UUgsR zfiC~%E&i_kUFlcs+_B|3z9w?dS!dDfwsXa~3!MkFmHaaJ&9v)>EG-;3`8dB!tRLp_ zg?NgT&VA?A+ah-9S7< zhLWk{N}Q&PT>Mv0uFLJh|GA#Of^Qv9o-XDmHTvyYnoDrr6=&@*#TdI#6fkU@BE}dl z8qFh`KAIV@Sqd;$WR`d_SJ)6h9L6#zCoY|d-;t6_r2AqIVpy#W>*MaQe3~mB$$(Sv zi`6^wd;0!`3$z@q7#RlgRb8C%&aqe9tGlYZa&Wr)ww-Hs-8ys&UT@>iT=tpa&wOUp zXAUp^%w_1Sq4vwP1_;d!?amDMVl=VnLRr~%2AxhkTDjv!CT+HJ-Dg$!^4-{Qg^njlhWG|)h7t(n5IbuB)d1k`~Jj--Stsngw zo|R@PZNRVDg7rk_sgK};dl1{|kx?IbB-;b6sdN0*P>{o}J9dCRn$qlCE}hX2D`%S5 z5x-{H5Pq8Qq!eWUG?S?wOtadJwRU0$BwCFPZ7mI*jh$tc)j@2^(J2LamMt53bQhIr z;dP<592u;!(zdG&lqRjR4Q)Xj9k*p$I^8`h&zW=4d4Yu+e3|o_4i|fi53lwhWa#d} zp_OO*JJOf08B8CXyLDDJzhHKb-!s_n_c^i;@VU?12feoJ4!pvE7 zw{MI*wQl=F&7z&lm!Gj=^GFLERl}K7I=}l8>T*r&p|Bq-L0(%^ly9-=r5rDq2K{-;XZrppG zt*R>6X4O6Zv*urN?W+C31!s+~{7Fei)~;Ho&sOo^+8ImyRkKQ~W;Nz+S{AIUEzfQ! 
z{qG&m9_=66Y$+@FQC82|+6IUN9x#YJ;vdo6KvrXLW*bfTo(Qo(z%x-QH?Wq;)53g1 z(Zj~tm1NUyrV1OFdv_#A=yVM%Q!=?_7T&xAUzUMdC1z`(LK*0j`R)t zyxT^(?||1bJnXy_r}|oF**(h^uUh2u?H$v+2fX%?Wj{2X>sx$AdiuzS*LTK}*xA0( zvwV3YBY8f0@`1B5mq^BR@{6pv2+8v3f& z`_s$W{3n?o-^+6HEeu)|&PO1Ot;1;PhVmBvS)7&i==bql^s}K~kPV(ZpB<*_sVCWC z8sTFi>&9^LrpdL72aAic9U7iCIK2OoeP{3L@9t`;tM!mQ$aP^37St9T`Qg?pfFo{w zNI6ByX0c8zigh$oit`1abQF%}yYAX+uHJgaisj{HC`FgogV!&D=5#^x{FC=E;&$-V zbL=p?Ae>QNmhZ74Pn& z9EBkL3d1v*C?4jUJ{PLPIDC?fXB3N+q4|UJAPEa3fzK@;wWF@`333XZ1#n`xip>uX zFnk%5O@oJ8QLwf;524hAk`tb^-R{C9sCF}OL~)Wioqf(F891;}T#%8A|1$kSByDcY z6j(NtRZ`iI)AJnkta-2!v!q?O$32hv$phBmjDJ7$$!*;Cxu20~E)7Kf6N&njSMz6! z^DfU{_$54?DU*RR98lp$_V)1VS7Eed!LL2d=ih;P&9{mxMWF-yMC#Z^{x4Bu%@OPT zM`Ycm6c1=|+iFI~v{obCr+noMJ6Re{1Nb^JXp$y~G6N2|j&8Bip`8?fl=zopeTWIe zj>AZCpd@iIB$Z6?Wh({{RDhfbt49PoJQ$|BsLY8{e##~SR5qkzN9MX6Bqq}*OaYY; zCe_Fy8|*Xc>l=`hG}Jd#(ffM5$0vNB))qMVT#we4aq@Zo+lEl60kyq<)Q3X#_&Z-z z(_qS`_)@~~v{g~29U+A;ru_*Gp&p?HtZ&CHRw!^&t}P7|1p}EPBJ;TTQf8H#d-Oz| zj5;5G*Rq8I&b7xPfB)RG(CWu`TaWsSJYJ8@VGp=vAmT^NF(^Hd?{vdB+|I&)2uwud zbu7qFi5OTyyYx9e+QJ5jv_C@MTBN}lKq6?R=b4zkw1^B)k{D-sdYQCD?4(jL6SjEr z?VEpQaab(l{*OP8_;uCQ--$dxCMZt9+8y=`_mx*`=aQkFYjs#(o zoqWsiF|46KxmWtwLX$IrB`I?;?|4~|ogH=tbG`4|G=}H9=CSxdFRmt zcmDajTC+O?Md>*XCk)K#FTqBf%MM;ozQ%5e)~8WdNBDpYQ zpfzAg4}APvdifi_cHh3sWK9I)GOb$s0xH#bY6Kg-*2{5(&yKmoqHlL_&Egtft*X<3Gqm{I~qVLl;KA9pGQs5y=Q>-TM#i=W7Cy^*i{#0=L|9 z%ihcO-F~})UyD3=>Op=szF&&gZdkSYIa`?KgR{^^k5L!Op$)2>$}Ve$m9}$GR}cmM zf1LbbKnt&qRAy9X@{Fng|5o7S4>PqROI|$ZMJ;%})#9*PZ`$|EL%$?jiF^@wRsl~M zILHs@q;bA5fHZjlupm*v_DMzup)sDi?c>>@u5vtcOZR+L=E(yY{DiN_?Z-ZBO?hN_ zhL#^VdBD&Aa_E=)KWo*v({;nXNbl|!sa9nB_%gOpt3X_*y#!jXAtJDU%Gi!AEHSzI ztR9`CX1&^F!6OD(o}J-LvwQIpjRpQpTtNQq|fYuT?`7e6qZ)9lA zcu%jN+~QBO+It(yYw>7=qp!YfokhnyQjuNku=dxN*50~z?+5CCPi=l(M@eCNW_{>S z{+zn@jJ&qGcATO?o_g@qpR^|JLCm#y=AS}D6P(3n-;FlT&PA=+E$n_9p8N87rQJJ` z>f0u864^GBzLcW@9qcd{&aOB;_!#9>Ko%{nE5Grl!Eq{0jzbQloQGT}vTW;_TTkBV z)9xHSS>)3ThCjL6r{8qu4t#x`_vG4P?JeIYe;C!reOoWRboG|i6RVFL!OIrRgq$JP 
z;yWO!@GNb0xPsy=Wq<1HDa6{*LPw^YOzg{wr}$#uKM{z2I`+6GdYip`1i*P(NGmwV8gOKr+C6q06wWEe7PJtLpxh$J`y<17Em*f?3 z4xXu@)i6D$ELX#2(RWL`n%zD}xD($Y=E6ZfS{*meYY5o)#jlSKlrKE9?&Jm0MRe~% zxrnZ;s%W)g5&hx*;KDPB!uP6|}uFl3mo1v#ZvTX8XU~y$N_**L5y>_Bl8> z2O|l9AV`29CITQxf)hx9Gog7VMGe+uS)v9@vgFv3?Kn;>hmI}BsqNU#oH$L?q#16K zzGiYJyS*8j;WlZm(+o|V+oTy@(!SfYxlNlJ*B1Exwf8xg2ue!c_rC9akG44OaqYF& zUi0em+_>RjVYkPsM)Dy|A(1*bur@UWmsMk0=R5FDE=VWUiO)f&F=t^f|~1nHN7SN%WP= zG06DvK3tAs8w^#z+eI+DrY*XwD`B;ziRH(9Uq}B1(f`UzkbltcqmmRTwqQPVqxpmd zJHcl}C;T4x3hxLhC+#clff^8gt5795;H2fSFM;F%VdzbRRSDPJ=72E`+9!qVOF&e+ z-MJ_^o$H)*O>$DMzpp!;Ohh66_~I=|?2In&D0FDt&%PQZxU)Sskd*@tglWaK#<9k= zMx$tW__kFd=CvnXPdc7-Jz+s`Vr}HS<$vznxR9j)c{6yvcVXksw(9tKqAl3#2c@5| zFR)+Y^=u%ar@FXrhlnW@VcdVFhB71J$yuAF17E`folbTa#$()|s)6UT~h7n}DRzy5XYPqa4J=ey-&#@dp_cv}-B))h;`D%pI8uR|D^a-jbe z&O8kqsQCqZ{^4gmZVfCZ%z{|dZ@%-|XUNms_*vG!=>-kMl2oVtuYdjPZ~94o7eC=F zACtP-zhFJh|l2IGE_Dg&{j^Sh0m~nkgcr*{Rol*5-l80s@Pf?|;3lf5YyA z+ER@Ejb9V49LM-y;`{9^hJgLtl@x!d(w2321Q*1$F=g1ZFMa>v&)J%-0JdUv-FrW4 ztANw${|4@<^a*4?`<|kC9MU*4t)BWkGVByvXwpl#*X-r^2iHumB1Cz#e*hWK-mmD; zXN)hGASdt}jU{njthzPYX4rG`5k+h6mB!h(Xly>q&-pCig=BZG%j#B zffu~DNrdZ$Scr^S+%!YJnk1$YL4)?3d?<*I>y+8oJpqelhsC}j7t2J#EuJ=uWxK^j zKSjgxxBTADaDFz>5om1*iEprv@5z^8hjD;QyrvzN^asCWAtcpM9A^og($arIIr~+> z;MNKZLL^d?=PwKjU|1-djkE|66+Zz48E7!zY{izK(STtbpeQ#+3(*)*L0bWV0A7EW z9k*7hfhG8_l~KFL}Exi;W)RDp6>Q2 zt&y&-$l3*eI*6qnL+WWB6kGxGKxYI@YH^ zZH4%%aA%U%FSD?GN)9XI5M5fN9zFxbN|GW5iX+T^K!Tu;d1ST_PYCxij!0Gdp@J1w zNElwMEU9nH=f`z@dpG>d>nFhzl2kGC)$S6E8ne@-{AF(eglpXlmc#{hx z{`u0vd8OkGSQhT<&(QZvN(c1XZ$MUL>GJ@CWwa=*Iuht$f}=kAK81|OuT;Xgzi(hq zh`ucwaery7GnJpEB!|P*w9o_su^t`=taLQTVXG#thL-tlp)953?G?i7VrRk=4B}F4*kI&DiDm;JVJm;_ub(C~nuJSU^>raBD0&G&s;NoNz>} zaLQ@xCVerF7XjVEURA!=M{$^~LYGeI0%2OQEI}^S;Mb#R>v!0hu3Hyfkh$Qcz;80P z4Cf{zb)=!kroesP6P+ZRuG!99yC2a@N{)1uel!y%l^21(8!B@a*Gd}vxZpG>*=;)H zH8vEnG+GSza4Tw>Kr#Q6d~;GU0>dlVuYWM)d5O>PHbI$*W;+H5f~ zCgp=79F9u|`5Dldy+x=aj1eUj4>eymXwNaQ!tfXXv+IM`fLFI=Fo-NYC%;J^R$8#* 
zKrOBtR7Kb^Om9-c^=dCt{$LpHo!?b^BX9}C=Sb|^bN_#Z`Y_$SRSm?~HEfkMpF{G_Trx?%tonbEL8 z%7r4aqgXuKiSL@F=2(idXb_;2TsDBy9?#7Q8HyeAMPrH*2&{1&9G}tD-a&`EEz{Bx z*f!GJISv{FI475fDTfQ-Of+Y$^zJ-3)S_f&!-)aTnA=Lu#846+ucAIiIDV)YH`;)~ zTCLH6V!O{LGsI0J`B136A)P`dVL63$2s-s{izMA-*u`+nOkw3Ute{t+BH~3oo6N~NOoTO=NHnMx#XnCr90721r#$8x)e+)MQ~a z+f#od9CRZ93kR<6kn${G;oux%VHSNr9{d^g&K|Z zMm`pDPn%;|)k_%8iU^VhqjgqBGuE|JdB#}RI^_2rkuGyAh0j-%SydVnUK^15U>06w zg@HgI5lA3Zl0IxT>wIN-Yz6tkhq33^JuQ|gM6I+PbOsWoe#XI6I58pXC&NX~&Li&dd5r7hG~2jKX=YPLahFDF?h-;Z0YKbWGCAERA=!^YcL}n4 zuH(+$EeD!B!%hJVYi$CSE1iBu-6mV1xtplD3a+)!fXo1`r4$T!JYGrT8}2N3)bY`-?+9@dX1&Y%F%LV#b^ICtK7L=T3n z9kFVS1XHobr?AV*(z;^NgemgTHeM^4sIXR-i!lz+>hT?1q^Q7v;3~#{J6H#P*9BYw zg)=LlKn4>Ig+weI3iv%u<#kua)jAj{Xf?QG;YXak z(DKungU4H2HHLsEvaM5Y(luJT1P}(r;^i?V}+4ndO(uJRFO99AkR0B zUjoK-{$hRNULi7?oBL_EaBFOEQg}AfG!no3fE~}1$oCoJ=lxE;PuNo_XJQryVdQn8 zWG%#&>&Q1yZC)WlYOv+x=Ubb#zjDK8Ves>c#I$#ckZbgY%Y%cD^URqbfybrHUa zbqRc%DNc46_@)Ej>egp))tp;KKvV6wB-NM4r85=diZjg@bK*=fBEQr1T7pbk+~>+V zjq&z^PCmf-3;37dE{pMy6VWGZ5zw+?WIR`AG#n!(M&n6l8SBj?6OnLx0F_)?yq!9E zg%ZsX;}IVz#uG5y^C@FEfirCwPa*D4vfPeKUKut7PfFuYmP<8%0T0g@g{z7Atmb^= zBR|V=v)jN;D`*_n58#5auz+QykRf=tP#_uPCzw=_mIP!T0dO54%P!^`_5ju4=1A+W z*U@49q_-kWAAyp*Lcf#}WY{{aXgM)F89R&{>PSKVHV=Cx0tR|0?e9c>NjR ziFP_{v?lI5bxq~r^omW@4^X?KdKKd4Kd8TeNSz{)^oVO_2^Nj?h{hX}IZ3X+x48T*2UQob6%kX7xU8F{6;}r za^|#7b274oXkfF*ngW5H87fhDO+z!z`5pq00Twhvdo1=$xqst62Z}LT?d-k8&?E=Ej{zq@SH+EnUQHtQd>% z0vQh}I}XfATzMqRRxTMd7!w*nvk~NCL&!6|;f#5a&XFPg4N}!qPfakVeL#m^-UppE zq}�nn(+V+`J9m>G_0aY*KTczd^As?m^F*Z*dHkx6MOC%E_w>4?H`zjJ4GJUR)?=Uu4{$r*< zDBjm=Ns)D(t}R8@b^rh&MovvmjQ90MI*>cK#f~C}tVhmpixBohw$dSfH-rs>dcD23OwklSchpYo~M$-t-$es_B{bp zsd&C6yu{baT}eKcl)VzFHbZwO#%uvB#F(`Hmy7jh>xZj;_KX!H@&hR%L^OXnb*6an zuq;r{Rg!2TtHEe)Ad3hmu^xxH9?E?=%in}Cf$LT{!jl$7==I@pk5`sb)6}HH#Hmn5 z%ql&u5993SvMQuj$l8x`ec$U0N>cV|z>|z!NiaE(Cq#pqCLvIgdd~#IBY~dtt98Xa zs3#=6LW;-5wOTT1V0msuimC_xLCFEb3Wb@ILdVQU2VV(#dni4j*BkLhJjjTgi8WJU 
z1k_{T9@9L(voC=pGH`x2s$6NQ;h`TD_M&WG4{9?VDg0<~_@kYRX5psJV;`zTtbxN|MgD?DH){^IvT~=S7fCJ^w#mr zzm#)AlxC!Mcx)=H8=!owrX5~bATMrInY*iR0y@_$A-iEBhJ|!yGBC=u_o7Tz!vyp> z&BvXV&D<2~Bt)cMK!9Rgh|-WxSy*zNEXCMmLBuIPv@|bL)&6lr!=@9aFi|ix=A_ zb1woezgYgMd{Ul+pHx)JNau>pF1xHs8R+l)0E?vz7QP>Vf^Qa&8o4Q1;l{ziQoG&S zMgnpiACN7-4HLDdylJsp5#wNNmy(?(u-$^#=L{R=dCQR+!5=)-o+Wvs6?hJs(Mtn-%Oy7X&L-WeQw+1LW=sD_80Y(i~St3%!DoEUzs z1#AgnF8oe-tx~^q659D@2$L}F8@I#{iY?N6Ri_j-u15Kff*CVtPpAlj3Xe&mO$M6} zige(>rvkAwchP_&X`x9rlV;B{ZB|Y!vcB#=_P#5uhQsgUg-gA6dhd+?x%~V~yYATi z8D75h{PZ2u-=_5sm z;#T&!_jvAN_n&*&;;-&x!I3*g*t(Z)yaVqhH1hHW<->U6$lksVe&A~jyY?Sub0^(J zuj0UXLJ##ALizvy>RU*#yP$l^fSrPR-&4@}LMkZR#D#-_@etzB>O2zD_3&Csv#Nc$H>UD0mma zp2888^qv+J4d602XawmuiwzWHe2iMyUS%9owAP)Fv5eXw`0N6@LSalfY((W8p)j85 zpBXRYM{0D8CVY8~PHwLP@d6L{MnK`R^q{bN!a<;7%dc}%fXuW=PM>)2D)l=<#R0Pm zv}M=R1Ck_^Pu=NMt?NSBXO!2C;rx6b=XGg)SRKjt=IW4Z0iFi%*^t$;J5;R?p5dHY zSsf#fgX`$QSC{Nc#B1I53|p0e-7~-;0XveBE=$NE*e`q#>h^CX^Etpn79e4OtbtuJ zDKoO_t^|gupA@hlnM}Zf=EH0~Vz#>d@Y#66g<4n`a&9apGtx$^+#H{c%hY0jUoP+O z3E229ORp7oWU&5tk?I1<3ab_R_*HNNDV1s*Z7AuS;Z4y9ncyXs22A=HNwN%+Cpsl*dvQzM>GGKXj5EMhW@sXbVKWdT0bul`WBrN#+H*E+y7lKQ?9Jo> zN(=*Opoy|`4^ZSzLZEuX^OaeCbR<{rqhreZ@Q9G7xh#f=CEPliD*eiH zm6@)Zeq?ZI=?wQnxm}eZkdlyHJwqU`f<^hb5Ob7>`1Nj-+b(Aby!VbumcYLsT!R#O zLzE&9wP)G44<6jOYOcU{Wkne=`N=l`cau2t1q`P1^h)2jc_#|srCR;0g8{Zj}+N81kWSZPvagc z?uV*`Y%hkXpkg4f+j$lNU`<5;tuil=odA5rV1HjDn~!NH5NAL%j`p%ndIGfda#1Qu z(yfBdb(gn61dg<-WN>6$Ho(+~sx%7ZAmc6mY7(@oIMhMe-{flbxq?kW0RS86ItU;5 z`4KIn{jZgtXu~?Nl0C%*bN8f_;dYX17?OH3wEf|MOuje2(n#VoeK&?>)SEW;E=%v; zFG;TuZ)=~8>2PS$k@?No33g`Hj<2%wtkE0~@O88Y0N{8Rc710(owtm*(2KTvv^!c}F4t^j z!yvflvId7BY8VCygC@qu3XQH0VG!pL2xr&ucPqeYnBajPe0q=@BLy4T4-|rL?FHmg zhDj3H7*AnA$k2f|jnoGq^;~Ph50zK@7!V!s99>@zBwtLY)46oc;}vDYJ!M^K270g@ z*Ofwg_Q>$prW|XINh~kYMDZGiS4ajLOv7F(sQ`dQmCoQ9t4hpTQQ~GBwdFiHb-)$j zdJ(hzfnsEM7P`UV_4I;zh9t4Lrv{9KQv^prn*|(ceQ*902chX$gW}1YP^+--a`06M z^eTWfm)M}kRlpgR+04|$cwsaLKf{&4#KMd_utp)ePVgkUo-^qBrvyw59J^44qG_%M 
zd2fi7Cfmyo0q``Tf31G74H;bkABoWa>L&+&#hXbFX44~?5#t!_a0oHZAg9p#AkN?k zu!^s(J%etfoD&ZOYdnX-IQ5w*Ck+c;0u zPprgm5+~ZY1Ao;yS*q&Lux)u@K}!y9=UE@$|{CyR7hvc7id6S9GX2zMA0$oq=mt>5Di}xRo*<3m!>dHGaO3z@C&W2~;mpBMU?m)E^ZO8VmvW|<;bGwtwrV-aqvT^odwv_+Q6UvSd#7%{29W4sGI}%Gr^aw zzz`ChIEH{C@RnfBuHF<_%Woj~vbI*guZ#+YpFQ}q7}nDS1|cYQOBc*5M~Z27+)U{O zUmt@brjnnO_|pzQT`n~i(|Cdq5Cg%$=(9>o$ZN1wcSXaPh;ztYu@t4qjMc;x!USSu z(?zy@r6x%x`xvz8ilwb>m-FVCSxwu?N`F;JEA=(i$S07EB15LdOk zW-KROFTFdNFxJgq@K95&eQ!BAMWIOKQ`ZSf}6pi)A!j~y! z4o8tYpwK)dp0p%Hrl)sMdT$yN`c2w<(rgXU>?(~xH_HJj<#x5P>E^)n@G(Y3+sfhS zJlci7=K==ABMTS^9vLvG@W`uxAnXw^sOZ)S27*5pFrcweuZDKjWv;PiSOWqkfu}^J z#+G?AkXhnrtAqB@339=CNg|(Fov3t4AQ;nToH7SO@W*VjYiTtjiMOBcajefd+_BqHsaOU8iO> zFkSFC8Z$P7)-%iP7Ggb*+c^C*MxsXo!iIPwi1Dvk7mZc+>cQScAPqSa888)j3dA00USL|I zvBdg~v1p*`^%Fm|o!{FiKVNZg&p(Yq0^Ax;O6>AA!li@Lo!pgNpEF2 zl=KFA&?Tr2>uNrmUCKrQ1f|utuDv!UJ>-;tssS-S^HT}va@?DMDbWD|Q$Yv7V+x>_ zuPyu!SAnJ111I2a^8YnqWcnit7!j`{U_@i1?h(-iq%f-4WqBSvcP4=>Ke^CzuWa0i zNQgI@(};8`*@a*dbGv04A!40~_Jgx1+h^b*NXTOvTNDKpyC&=j7{uuX?y9Mb7op#D zbxgxar5jrU>rCNNtMsePM2a|yuewGL&~n&yC`_R?N~&zvp?wKoYvHd)cLVlK+Eu@S zTb1Qq!0J{O={Bu8!pkpf_A*y^txj~T<*BMWRl@=I8}Ku;n^X))irHZW9BO-z_8EW} zfcm-C&yNEFw5C3mP3TV^N5l$+v6=v>JzFl_C}c>gJ!L}^Mysqlqigg4quk#QON$X% zQhQ_-dyqIYx*y`qXsus<8llvDrQ~XvtzaRY*NSNk;Ft>)cgu*tqx0gRzgDm7*k0{C zBnb5O%B;80TUdD#)%(lKYvmsrcTJWMGtNef?m=dgW)L$z4fO{3!4Qs)S2ilvED}@H z=1!u>3=gn)9TGH4>%TZK=&@E4}_ zeN@Ps-K+S-3lRkzs>#>kn)TyYQ{~-|<;HYsU)dVLl&fr~$`^h`HzMv{PbS@!h<1kB zTm7CsZy&iM8-DNt&g4@i;QSc&c3JwvVu1-JLtL*QU_yNU@8b_H)>&;S2@W8xO)%t`;z2$?_^%7db@7ezxbKD5@S ztU!cvjI|#gmj0MOR*8D1C~J%<{Nsr)D3sT|vAC{V#G{%1_S}?%M+J@yt2n$zB9Y!m zZ`uF8Y{Q|L>KQIab@Mg95jud&(h>0z$3VC(myljlL2d>LttPb-Py}y{5n3H8nvp;X zgusSPVE14Ql~|gL@N12SUz03Ix`G}qH;wVVy%CE+EDObcnPFWp1l-=(<(_61@nJ|V z)VK*s%y_)qFTawnNbHw%PPAXLUGx~X3q+mKV}+R!L_@J-+}Wn#o+G>f9g>*A4&MkW zkUE-?I5g1W4mF3WqRz{ECOd=J^Ak=Eg2)Z)XGmUo=Y>RcaC4)&L$iVQk6L*K? 
z7jbtej^(mcEKc)#1MgMn1RLKPC=QL(A0$Rp-4{pgeW4gXabIX`RL`Qj(r94wrd)ks z>|d<;)5e+~;68EhC}MC)!soTc+49mARU6_+#vyIkL-wYaU*kT(m2joF|N$FwVR;ccWnYVl&R%9f(M(J}aYSJSS;Y$xK zTu`1RC zFussHQ^kZGY$Wavm?=~ZB6t{C8u82^Ifch-;khmVzwe{(xbh4<#*srGt?^jY=UueI z5&e2{Em!3>F zDKytWx^0(na<}5WRy8}I>qHS&FWL>A6f@Xn)HNz@_OEG;*) z+n4_YH$db^7ZTDMR1vA%3YBif?f2hy?8uW((w#8+co1ifR@MDZN(d1bS>&xq=8m>A zAPT543z`|C=oIFttKJaS=`cO?YGMgdRPlbafz3jjzVU;0^_R3bATQ0Yv zxF!UUB5NdBEW~(~K_Jh#@ey)|e6eI#m&OfHvh-p0MfNNC=bYwUBhNMhbK_J6dqv`1 zQI3K;Res8G)B5!r9sKi7YqBdjVznldT_f`Dwe$09>0fGaFqIw{Ah@hqZkES5d~?Kq z@e@IIKmc-r8yH1AA~(GVM?9FK;gN9!BEuSO&hPiP`P;m{7*EG$=69RHm|FSF#F*f3 z>dpzn8aLSdxfwY?_(xCxWr6bR2B!{fn#Qnzw&4kB3$<07|40iC9N3Gsoag5iFXI3* zDk1C)r27c)nB%xn6r8HZuCdJ7kZaiHv>`1`mG%Q;`7qXQJ{H*##o3d!zt(t2_3@NR z9!X*zPb?Peigg+G)Jo3Ic8rIL(76A>@_3-au+fOh7FB*O!p>Q9cy4guZjaj#JSY#L z*VuC>#y!IGSGvmMzV~S{tRM(Uf-ZT{005j6m?qkq8Guzp7pfa`XfB-Xx0{yV?~UONVi?_0&|YNe|cmS4HRs1Ig(Php#@`3%-{fp;mK z0ejLtrY|7B_Oz~fY_5sRk#?-hjCFw(jEo|&Msg(J3My&Jkpk|Ggvt7L1oB4h#4})y z2WoA|u2k_3wS!T+fKJn#kQ9TEOR26q+*IAZ7$`D-*Jv+*aTCW6+Ka&BN5ukTWr441 zSqK_{%xGK=11TyEtJYQ9kdiToh4J@-ue_!Ome|sSRA^1!rphuYdl}Z4#Tqwq8p7>q zoOUk>8q!y1BBT3f`WcW_OyF>tk|5Zm#-auwRa@J+*|4Sy5VW-w9u@7a)kZ|)Zqc3H z&hKor47&}*wZek9nsrqSe?-u>V9@IchJ!Vh)=FL#;t6h+O9oG{4zk^Qmw!qxI@kRn z6quaZ;nFNE)jc_s1`Y-6j5ns7Q;zWVqkKFQZ-2G?Q_4)lGqvEVa{UM5f8?dQqdzJb z4&%HNjhl+YH=EK$s2Z^5_>DFV(GurPx}^%8Mm)pvS4$5|$V@;C2j#M?r`R&~3kq#2 zs5y3HoHZDy^5#E)ao$+k2I%1fpUImFK8_ZyNm`8yP*FI*c=Zjx570DFBfM$PL(2${ z7lK_kv>Ccp)lsq+^cq@g=`;X{TSJa*IBEF*!LJQv>I`vwM!I?LZH?_C%fSr~{!Jk7&}aDHpH^bU3)H#V6oeF%^QoC!}g z<>LmPB0lLeT+ai(hY*19bOnAk=7YfLqWY=44LqV>>XkN0Pg1VwN<_?*oJqU7#}|{9!jN6Y-+)RgYicTWyaDLdr$mbSQs-PF5r zI20P**!vdqoBV;fK#6wFrHO81}PxkLdb^swm`stAHdQ>UbpIBTkh!@c`uX`!!= zNo$K!6Mf@-aPiNN3ZGu5U4jRK(KdgYR3W|e;SBy>QYAT%G78_`d3S9e02U0}MD4gk?9XM&_2rYG&p;U}nXpO$4{b1vC~u`(o19jk?(;UQs9B&U z3cj4;!^xvW$Ry*NV*L-cZ`~hm<~4}=qrqrYRkQEA%r=)k!#+j@v=g0FiAa{lFyD7D zAL8=KkBscYNgy$1rSKV)lCYenkUdf-P-p;E^8|63B^EhF?QqqWuHF!7v}B3W&B6?W 
zXI9iq(s4|Lt5qwWVN@cEFxH4^fmQ!rTDY|z&W>1rf9X%%ue!;7TjzXkt)}Hh-sF3I zVNc%AK4R{t^zeZ*^1-hS_4N(i6&Xie2h=NrsR?xR??ER>7Esvi@~f38={tgkR-^%x zF-5gYO#}U`dd+LS23NKQ^=#OhVyG(uggG{Ut7@Kpb9_)-aIN4ImaqmSdDDRObzFkc zl-R1(qzz6NNv5mv1K@ni!{S_5k9rhm!_&^zXgw(7fKa?uOyFc%SIU zEA{a^OYWhaZH{TPUN2;}Xr@DT99c!Ar%;1n;nBf7ASsa9n>WlYBbFL85(dQz_z z1R+ou`8XKsLPlUeBq~D$v=R4A`~~Ky@E2>Pca4FsC?c0((H&rRHHq{JUGPsJUk@IA zBWYmMwF{D6wW*XAlvLpQNe*Q)Hmy)WkYj#Mbi(U({k_2L4-WKZ#!Svd7{Dkl?zi$o z;9I(+VJ1-ouU*PujTE{>Tns7*+pI|WzXU=@75HR!8b^U!Xtx_#T1cxdVizaNdWOcm zfH$l5BB2I(FdNKO-3ycrmASYYA8gS&RFOs}&hxJ9S)OV|C%U68sbm(h->G3EP_#Yi z#A99?wOi(tltdvxd1LzJ0I4uXh*6^G-E6C#p4?iF+_fHY-(T+9BW)Hz1Dsb*?ZIxMTyfscPE)+|LRP`!rkz0(Ieb6Z6 zpCOy{Q_UnF--PM0f77;spae{UP<>ReBcT}73;8Z7l6RI$E};B!D=(734IUzyxXk5} zt%#*3SdN3S)j(K;L3?#EM1a%KK!UsP1SAD-uav`1eTgJ~wfn81i=l6Mzty9LIv(}D zd10rk=dJ9O14q}%sSkbVeCa=ek@GjsF~^^g;YQ&!p-1{Cc#e>ikyh3)PSQu*YU}6~ zHP1v=C6ljh_)}p-O$I|D)ow|Jf+6*5SNZ9Vj##XtxY{SY2+{`N-Ddtqp|U6zEQ8Nk z0<^hqgGyX*4<%q&8vIwN*T-3IB^MLe_3t+va5}B2@h!c~d*RrLxf91&-{BoQsd)62 zLHQL|9>!@hFeV4(VZ1{@@>TaC6+=5D7fANN5}c>$3$z0sQkGd>aUAg^32(wf6~~c; z3a zeel%zP_wVi8;ZEzhOI7NS$;u&HE6U;%CaQaaXcLbFV41y=qud4;WR*wYB9W7a30o9 z!A2t~D3Pk5;4{(+GZ}FtGM;r)4-5}$(GXiT+W|ezZ|l2sVr+z3RCNKc!DARoAG$lW zbHvagM;&x>B&9j(I)Hi`YvK`I<>eKJuF=bO1kpf~q7&EwYzfpHQdzc-2uf?RI~gab zj0s_upF+N%C9IGmh?^C5Q!3I{h(Q7-DHwI>*(Pfz(x++#@x`UuJnq5d#u<0PeRN|o z4a2nRWNn@ghF|Y_JoZ6P=^y1l&%i)W>HYNiOnZwbeELRq@X+ZFPfPpGvwv~!xpUYv z@}9kfw{;%8dmn7l>&U0a!!!oj$O1PQDu}CEmK?a{imEF}Finx?P&$K)r~I~}usr;# zv)MJX6Jy!Y>}WSUn|zsMmlGE~xpGH&^D;uRr}Sn8UPqO1&x|@pdN3nHA#w4@|I_Tw zW!4v+8Rx}xKyJs6CuuHUG^V0%K% zsBP`RE_Gw8x2b)88?$bkZ*PJE8$a#Z{LWj#UG4ta&GZ|_=BeD_{HIBzVOIJK*vQMz z>qUcmA2!f>rf+f`KWgR>jri@o4M4Dv$4bnajUa#%~a$9Z( zw5WP$xbmaw&kA&=G@oMJ^eKE zu&vc+IVP=_c1TByhvBfKAP}8Ar$7|27zC8V9|x>blS`I>GMc0!<1||Fi@0Hj+b&zJ zI#8W+E*myXjBnVnVaL|Z@Mu^+vA!_MPZysO&4Lqws0ap{PAVbv1x9H>NW~~mTz|%8 z6ekYF;7^jGe-q;I?Z>u`In~x+d)#`n_l7BP+IDZ#aB^MIAx^s`rk!fpH_lJnYV$Vb 
zduL|e8%(I40zTQ)AluGSAfY-R&KI}th{7>$Q`Mor<6Py?Ia1gYo#$fVe5mt%LKHg&M6DuS-P9Zu6a4u#+9QPR zYE7YgBk52+W|EPj!#CU!_hyqLI=!Sb3Pg3H*>nQd)(E58htTGv3b{^E_mKRC3Q+Wr zl!G=(VvjX5vokw0xNWG#+H@w-s{-KB4U-OQd~{uU+aCEWi-)7#s-qCuIO$O1BkR-F z!T#xQi zaZx&#UqTMfUr1>wC;f5J9qDKfw8F5&wuhOrRq*am!K0F@tS;fnL{No#3y82GKSLWS zL4vF3>Q^)b+iemdYKXT*VbCqtk66_PWEl0ehX?z5LP4rQDWw@wFpBgT1SV*_fWfJB zKN~=HLYB@23Qdexl!Wq!6DE%BFO1z#Xtn7*v%C7^eQxBPP&8|xrT2YJ=kkwte4y!N zgV|2IZD`x#W46NB*bbX*Y^*S6iB0bv8D5x(wz`^IG`F)SwtePE=16Y#NY4!mkIv=~ z%*1~=zJC4q*!p$ALFf>lm6Pm~&^XM9gq*FozcM=yk_0akB2SR=P~01q!2s6zLCDiU-~ol$=|(}I!j~h0`grxk4!70lxqZ9f8@?j;c&L;c4QK> z$Pd8_Qd0I=K{b#VLW9rfDf(U_c2Tm-_d{OoAMHm%2l7tCFj+(yhCNPes23R*0WyD! zmLqZxg$!g9RKUy^`27d60=((eXrjdr6l9EEIdDC!Z(=9-RE8QcaK9X$n<#Y4R&b{_ z$ey}oU<55ue1OT_rSD>~`ibGGVFaw6zS$mdIPu{K*l#Yq?L}L_;X-K&N1N?sxAE^? zE=R!jBFhgCm;RdNv2}1w{{r6lu>8D7mPqI}{0kyLLn~;x41#Ugy@805q%FdfK3i6S zH)zIn4drIVFexFBq8A)adBDmrz(7ZW3%Jmc8Z~{bSkGFVMN!%#{orX2(|Hz7B0L3X zGj$?7x@OU}`#`-dnu;JL=v1WpfF?{r+*3J)2!MACB!?7h!pbyujqZ>D?rYp10R?$#;hH?diwM21mR?AWsQm0~%<&RSRfo;uh0d8mV$|xghi#53#j%+m@}l$=ROc zqsNg{-Im_6WlM0pb2@W!^h6t;NZ+Liz-nS*VzI5!+8hWR zQMr)!yWoAT%j3=jb(eP96G>!(`%j;~v3sDur>!d*YdIY$WJ2m4eBbcC zk{`gnV^pURuFuGhhpfe?AyYZ*bSwhOnx!Nj3eqRxpOW)Y zooGS9NkkM<2-^x($H$vKX8%~zr8^$jg$EmKkYZI`w)Z^vi-(#%ZvS`_v>u0N53{qS z|7hxTxSS{-)Y-%wr8nc3m{gX&$iB#a0y|DfN}?{6ACe$N`H?V|B=iAdErEXGa==6e zstNh*l!#m*DfSSGZWviYuF}|Iu^uS*v04_;Wf3 zZc)Wd)XS@;s!~RLW0!b-W>~!KFi9WD{ zJ8-6waun3y5Y@5|H`DCU03oZj!3_|90QvSW?`*T!RdBIQwvFyxr+RL&m%hthcw1W# zVN0sZI()-;>7_N8=Q8H`mLekp0-Ar)_F^9V2=&Smw#cJoZYn#i9Aa~EZ_1NV9pFmb ztVyOTikjRp?7T*9VgF4X9oZ>69LOPg+EMzg>&Y>Tr7h5&Q08pACnsj)x5j4LQ&1kt z@|vC7!tp>T9LUX1Vm%*ZpJ4wT`%XyjDcX^fSM4NK2~gSrluH=22lz}La3uFZW;iI~ zi-^BH!ui`Uns%AQ=c;XSNemi=W_UB`^=>exU~{1u^t+w9)dK4@gfR!KWh5Dxd~7e9 zl}SQ}86?Nk!12QAEIGMWn^WM07PBbrZJqW-W_LQmoji7{cykWpZuCqA6;=XBdUKrOyH;zr>-CZVuiL7D55iuHaxu2SY~nBk~+-{YkBe 
zzGw>RRtb7WD_@H-|Xgl{W_q0$9k&X!3#&{i^8h;&_dSwvT?$dMx061V8$0yiQ0n<_GChjk?8&ag4Md5gPy8#rs1>4mDTZgAn@JNYEH0wmv34AHg56*(rcit{}hLe9jq9h36T(z3V^H!O9R&K0v;TTa=YY| zZL*VWzq)R_9abb%Ry$8l9fnJ)i^)!le2b(z9Qr)2<_s-VKSuL@wl&p_N#5V5Mjv_VSM(A$G_HbEck zdp2F|wsw0wZg}1k#*ql-RM6=c9Z~)ZM=Cs2fY+o2n!2#XWoh*XB98_5cL9GV z{eoq#{M`qFolqRL1UowD%j|R>4h!3-rwY2ww*Ab)=y;3M?e)5yE#srrKRmP$K&1CV zTPXBYC_rEEeULq~>D`;yx@g;YTO=A7Z;Srpn*4Zv4gZ9@wYdBS$XPS6v+jXC_chXb zYlc%FdY`%FkGqpAP|!gEAnyZ@s(>4om7OGZStvOXgFnU_kfU`5ZvfS=S1sx#;#wYp zvJ3fQx$H*ek}U{&fuU9*CACi?x+*>#B)XB=P>607VQt^w#lg4x-q!Lq-`n~w_Py8l z-j?_J{<8EP)_2Ei?qq?|Ki&Bn5*@JzSUbRwrJ#NLYy2Wz?1KDQ=e<7-E27A#Mo#*$ zAR=Qk?;As35d#n21R6EUcZC>K;272XFq}OhX@l;PTMC6Jk`B=*kBInN7}}emurt#V znz@LGc5hTZ0=Tx(xNKbK<3g^}te41nQr2uM^FrE%4Ih=M1O66_>nbOc6IuKisqf`E z=-M38Zt*&jC@-U02KRgA;YlD`Wa_E>fiE9%OQB*D+TmznhNnH5{IlA{G+T(Ovq4P_??$#-t5xTlAfQ9bm z5|~l8s9B*3<-T&T0042w0sy}vQc@VrU_`ZY>jPWFT2?kifo@EMs#CsXEFDMHGYFy@ zcctY8m5A$y&dBhn#fpF4?C|WG+Pl-8)3WRL4^Q}t*>Mdul?Dc9^s$TkW!Bw?fzLh5llZ z;ABfRM5=aqpyhPkmCHOxpK(c;ey&dA(iP0m}ieg&#zXk3WuXo~mt_ncJ~ zD2XxP`0Z!4?7IEK8-3fcj|cH<*fo(UP2DPgrF2VE($nJW=*jn)9Tlqv>L{?bt5*TWepmG&8;F_yOvl-{Y~W(b14qNu#@H zK%J?@sYu5c{o@BSeUb1NS|;!{78F0@-o1FAbhF$q_Z#;P9?YN;Wb!!0DGidXf*|t8 zgaloDG?Q{U8AG`*{G}U z3EJC^XGOI>MAr*WS1OV`oCdvM(?s&)BpR?wl@`cNC@!LUlp0Iw&Xq=dF3CV5;NBZB zzmcq1N_;$gPng}+c~9pjy!3795&D+h|5<$8_8ENOAu6db<}ZU!Cz_Ki#_6Kt;^PbC za`M5!y(hIOUJ67BcvOmM$ME<+R#Ls}Z@i1_8{SfmdG5dm`oIpGkXz;Rh)X#6DGx3; z-gS|^0}3bbB2Y@73wbCgBYqM14pw3LVVeDv-E@|F5pMzyjaPc#!^0_Rkk z4Re7zkvH&ukg_CX|AAb|D^r8tQ)RL>aD7UUNGgxj3`uhoNEz$1Y9`NIwwqba<_Fim>bcU+8~3?@vq#&b`e8*J$(>9f`+7zYcGQw{q@Xz?GCdh&A_M zbW}{HHw}5wp$A|ve!}fZS0T6Ry0l1`m)LJGBpL19tc zU1UWkeoEJFe-w}oi3z!3H*TeO&l`)@R5G(`-R9Y0FNnp)Ub5K#x?+*+&_IGE5HEz= zS%FVRp^F4ZM`X|edm+t5qcV$jN4t{=N_6G(qA){5j@ZgL!E@~LadiP#^FCpi%6+2D zuNAlwUiR`neF&c*yu^KIP@xP%nEn}9iOgyXP-*2g5}?s>-GW zkQ=7^P9&EQRf9Ev2e|RC9IvTAIROAS+P(HNZgk*k8@Lg!!VL*?3f!=w&%FXS27ntH zVz`!|XcIWWXDxqgwF&|QheK8EBuf&Iquhg&A+cu)+(y^>_Bdc8{h0+dl0rJsVug%?qy-|U-_zStauWSEVm5&najXj(b&$;pr85bQ 
zNFbwE0XyYe^p4dL7Do6oYl*@4m**ew6|p3>{t4W$p!KYj-7%XiBKtid5A zicy$6EPKS`O1zJH6tu#m@Xb~Y(xaG>^xm;Y?mPOiqZf~E>%OD4bT9Dhz4GBJ?|tcY z@SXb|#@vZ9oA{fJToHSW&5+Q-w4=B+A5dIBy z9rBk@0pQ9b@?GNJ&uNYq0lTw&-MGib@K;|m=7`;T?)c&3=T6*o{M?0-j4>aT@8tiW ze#e=%f?jRr{!m+so8S@v9}C^OBoSZ8)FsO83~yLC^ueXU3PzOfKKkKNHQaCFG95g8 z0`RuSTe@1i?6fuRQbsyo?wQ9ZJfqqGX9NtAFUvmr;HE?Q+irI|ZdfC4Sm$Vd+uixS z<3YBqbKfK8yy9T$9cAPn|ZjaSwIk4r#g^tm!{k=mIgF|EI zN3;F;T$XT~-!ammun!9zip)6_)Op66hzCryTmq`X2UH`qAg3f8;gO#ZI{UCk*N_k# z5P8Pl1280sq2?A3i0P9tB#$E6;Ko zQ~`dZ;%)--6j)F>2W#ZvrzRL9h!@9dl?^@@mzUgI6oc7?V?v}e)zwM)1^pRBr`e1f zZOXr(vLXr?q=b8l3NtbELwnBm@A7HMvCZAxv)#?sNqoC0(>gVN#T$PJOx>E$8ay&df}H1cTw`a1>nfVF=^z>+?WoK@? za9im|V?ARq+huwXc~|;HM{aZ9Q19kk2R+Uwhq43N9LED`1nVN2OtFo51Skl%*i$0T zj-N7)TZH&wWnKs?BES(Hni1rvYe+ao{yQdQLfqpH-oM&N^UtzpMtA31b$wvd;ruU; zJ)YmHgPa_T%coD5ei5D6IW)8|5s^=qtP{DRDLB6qUj7F@5zo;CNvE%vp%}}W6#W(U zqZ#r-q@G zwV&KHymPD*&vdl(5A4!UW=3*|G-JFL^fc~9TG|YI4P;-q?|?KbGPt%S$cJ#DR*1FL z7J=~wVWChd5FjRXcEAk+wQt-mJ7fuR__b+@3Kfd36N=R*Bs#Kcp@`QPN`N82qI z)m(0LgWmiwZfS$?UTP&mC73t79}I>Ugj|ubAz1YX{ED-)bc1mT z4t$!DI3W}V!9s$=6RsP>g-^4$gVFnwnU=c_#Juh6+X$uP8%u9`#l;H`@H718@}2Tp zj#E5Rh@?wpHyBStGSwl`R45b9FX%yf4}7$ON`!mU2J;moCoocFBz*}|w?d`m=z{V- z?jB>0E*8^Z1wPk$2YYJ%T}gMu-P|mn3uO|w&at&u-ub&nAN?Ks?RVzR#?oy>9&x5O z0467MKa6YjH8}hDys$Z^ylm{s;4?DGunOMxf0FlLiyiSus_m2aQnT(nfR`_~A z<#UEYx5ZKL>kr-sPYC7aj-jPY3ZSbs6lqDRA#s1O-85(oC*mRkNX#}kbnlb1=hqOn$|G;4wiy>i3AeqRtO}I(|aWRZsC-|GRB*}P+ z>;H}y6oL#Q7Mq*wI<#1*4W{8fCMobqM5Mtpi*x#47}F<%b*2uSOnZeoBq)Odt*l&_J*NC4>=1=E32go+3a<-f#_T%0?Gvx!?kLb^OH z2@jEw3b2Xq##YIG+b@rfBlb_^Pbu8H@9rku$gA=!yQTDj+}QXKJ9y>!Bgm#Qf(p~i zzXeVZY+c9_42>Bmam~{}91p+BJo_A?!4Qi{j{>78M;_Wl5uo%gykmUW?taZFChxl# zKKaBGU$ADguMWOCJNXsw-r4=ft~~$Q&$3%mJ?}x%r1E*z$iU^9aD+87@Ok!yeHYh% zb^U!ZB2w7LOT?oKdOV7CH?5$@T$FD_k2!>|PLF}8hEeLB?CqHKPf{%xh#)6=9b)z$ z+6|9;p1EMHIl9frPX@mwpf5dVxHNFu1S>~uBkom2#S$teC`yL6lM#O= zBgcc;zS0UQl@l%LOd<)PJ=|NUarLb5EP|HsBrg*1tdgKcE-Fd(6Y`**8P~O$7*mdXTE$@0RH!+dhLZ8?d(pzs?`>SEFS75V-wwyR0 
z@wIm3w(ZB-v7I*THw#;5TIhj{I38?-^5Fn+>4gKMU8M(NQWvV$`Mu3eE<2v=6p@Gk zNCbtGfIjd*0YsclQlb(vt-#v=--~dI^)y zbDvGk4rIFWj-}Su9JcRY*u2+ix94XnEv{IBM9qJG|O%Ynt>aT>>R4GKv(3*4t0Y&}G|p8lniN2!pZ9TgFE`lF_!Q+5l zRb$9&5hhWDQnoLhibo^iV0()XZUGKBmM2-#1+*#%-xClb?ivhg)C8;|hg0Hm#EqVn zjW?FOTIh}a>ti=H`J0!HF^luaj+0F%5q)Smc7*>*bsSoIoBKq2L{;w|T#Jkf1B3rt z9~{gNuF;#$Ek3w-OK9uX!=1t4E%^RibnDiAp;+wLSnk2$v46>pu>)fy=Hh+f^q%U7 zvA^Q)%j3V1q%%)9S5f|Wt&c^JJAl@|1gTXT)xQiyCxreMdLE!|L=|OxFvLGf@(sXK ztyV9FISjD~f6+j4RW5XZGX$?c;g#*)uWYz)1N+1sca(OpPk`S3Eyg*Iar`9FIC)f5 zy|N2P0viz5IALnPM9IK759)P*@k#iU9B7nym&FCc`_cc->6=Hgshd;j*WB{t+B5n7 z?wh+a524zq{on9W`j688`4T>WtI+Vs8O*OE4{=6XS1gij4xSI{90mX1hZ)HY_!MZ7 zhFpn(OuA=Y0Y%yx}r7`Zpin5NG4BWV0`ZbF)FKi;0OX;kMFPU zPYtSSDs_LKHJu*n8Bx{lo`;^f>5xO;KE>8g+nldDHl1Cc^;jp_#yCDTFy^RA{vkD z*JANh+w_?kH5LnOIlbk`mNWZGpWU@^nLSi`Wjw{cSsG^Fyz)G>p)?0{czJ$aWOpm; z@Idpbb$B!r%|$xAy1BTZK=U1+QKPZeEyuSU!oN|{=KXqpN6nn2;h((j2Ha>kT)!gW_|QG|%Znz$Xv)&uG2bA#j7%Oz#t~ z9ltmH))P0n-S-|VE-wCMst>PXW(zy>$z%KXEyA2E10yKc9?gq962+A9oRU7AYajUJ zD#ANR7dire)~;a=+v?dVUK(Tp`TR549GqHJX1ntI%SgIH1Qnz!;se~vS74_nUfKs+4zdzsePulwfe?zc zMGb_C6lwmJ=H`xIOT-^(_BH!_>6nLzDToJMAFfG@r^s;%E;9rxD6Oom?K5Y?lk+e-5Fmuhg@iyt2w2+D|9)p?SH(7vTb}>sWeo%<)t_R;7Nw2|Q*Wc%sSs?$DqI;Fs*2W4Nu zh!CmB$|-YJyUS#~fXuqmYJq730!-kqB-~?BZ$&Y{`P^2|?xodd_n7n@jqH$H8*c0_ zZ8&QKyHG3+HRP6In#nt8-xn&>XaYIafl3p*de)+@S>;WDI65!bknibkPfHJG`h#n- zeHq|?XwIYkNA1-82U-^#IW*oNng;ui1S5uKj%%N3{}I0r3npwZAa+ykAHNIxk8qAF zD}1{BN0=ZB$Vd{#{^Pd8T=ZN`+YjtJO!{&$dhaX1&quaYaj)Ewn>+ zNV}1R5b%y~Kkk~osXe@N>6~(JvMY1j+V(PtGa$?WCk4M%RJdBa5xoIh_JYdOw zaM@t4Awt{$U~}BJ1P9nRU`Ay^$3#R9EyOO0Wi8F}3Ls11;yKQXIv#ia`Rw7d=R5aw zJn8)8tl{fnS9{Yo zetv3dYIZ7tqPcQFCL?L7IL}O&S7}@a18#!$RwS*4HUS}k(8T1{g!xhF*BRn`mm|&N z$Z%xa#NCI2-uH#z)MGw63O<55vzpqoIXk=%2ABueX_r-rW=3|lu z&AINHFmKrdyf2V71X>T2ieuj5f(ZO~E_&eNn^Q1vbqLu&dGUR{3Nd`xzauZi@Cl!c z;%Py^=U$nUnG|X@4$N`doqvLvNd$F8h|zEI1h^TIk*7p3Cp$CQ<*-{#ELVh-pZ77c zBiqUAdt0Kuz^8}(Id=3Ue#Kj~h)#uZKf%0=W4>c|Z?lP_jO;DKBae0@E+Ym2>dxf@ 
zyza1>r@9Bm>kfow^jDG?D8}k|$As+1We3QQHaT%UE0>9LeeF+pS!?}#&Tzs;7t(P| zS!%MxLveu@4As=Qtei1_28j)m+|Y!%92t7F5Xwk4GxdBAd4%#4uF?oXYjhfFtw)Kb z*v|m>P~QuSrNlmR+>)j+KQ~OsYU;@DQ5OTP&OSoRhB+QWUNm4*Rm3kXV9x+5P zT6##5!Nrjtns@h~`U@Ej(%(e$0x(R~0hky39QG1-x`*B0gEr$XSg(`}!+%nFuPo0! z^}QGoXL#@AU(Wd6ZNqIp$9R$78F>Uz zdUy3d(gP8#O`DYs9ijAR=92!rqH9gp3bs=EUQb?5ZzvD+K)J@gK-`oNX5pZ)c+OFB zY4T1k9O_Qt12Mu$28@twr~ItdaNt0RJ5Xi0$*tG78^ZVF(bxMi{PUq^hv=T7{Q`bdpZQ0e~Ou%g>-EBikhG0LjXL;#c~yOzH#?%Voll5>OhLc)1Ro_pX97-LeW3 zcltbjdwd56ZY(`5+n*Ll4+PORE^Ow#P;g-b51rb?rXikeoC_^*rRC|DvC<-H9~-sL znl!6P3pUe&E!(}Y(|?X(&EM4C|qnEHB*+xtWNb*7G+e?dfKtr$qgwYS-%BP)_`huP&fxgRb#IRG^T+(M8<5%5i; zmQ28l5X;7UDzN6jXNCgUphErKy9L|WnnEy|s^j*@)L=b}TCKyjinc{1Tl$})oodFK zrGbL#a>v?~@~VPBL0f4Wm;k|#x{>T&mSyv5ow=OQMJrlq)Jra;n->xwe_Rr;xE+8Q zo?Oa^m<#1Hido*tmCzMsD5&jC$D4K(w0yd8*i<%S87ZNYUSyReoi&+?61rq%|N7Fn z61uhn=X8PeAyuv#k5?LHNVR8tYpg>Q+E<6OtsCLa1lZY7DzE#gQ6Sj%R?n z5&SR440dKlBj!2?z*l2Z%%Dn^#w$7X!81OCQC5&2CaBeprI#c72l!#(24!NI5Pt$oX-U$N}oQ7_#B0WGLQ|;XTVkJ;stZep4-u3&70t@7P8GLE1q1!e<%d|dv9T$^fDC_3>4uO zF9jZig`}uLk+5C12nd>VMu?zgpy*bvyyOmc^)$zda9LDb@B-L4@mpaW$hS_XxeJPi zEWgVs2)RLjQC4A^$C>HMAVgif)a=SK!&lJ5H~C-*FfFN|gC%edMWqGZ1H`oxzFpzy zh8J7+aJQ~t`r^`+=PfypJ>}2VXbSxtoF85{$LVX4Kh>W_SMTmG$`$poFA8T?c*S4D zUKh{L@nr<#q50^D8VQ=Qlj0h*7tx`8&QUUC_ke?f;S;J_H@>m}ZJ5JijQ>IrYCsg; zg7}p)RG5PO9B-E|r?3)Rr} z3IGK4m-mic!R}MlV6i7jl1G>sZ8ZY1{GwZMPYGKpn%-%u?ks_y&Q@@DY;z@~q$8{#`puxla1v%lofmDR%m* z+D9&W&G=yAvxDTLHyEoMOnB;1FOa&&E@w}noH;@+$3P}4N~YNj2VGd$bB_&3M7XOG zZ1$Gm5iM4AY%t)h7tF$(;r1KguZwk=!Zh-|cPEGpEeTDC0O^_}7ChQ*n;)U=2iv90Fj?BL+4?i=&+ z2J`c7=~}6JU=)tYqO=0^J<928BI|h)88Gk_Sc0)3=5EQWFi@d_gnJQ%;88ZPIzuX4 z8MCl~&C-#To3!FOcCB6da63!3vsS!#Xq-s2CzLAS%lHfK4?X9w4Kbz`#r_RMz+T*p zOi@7E6`lHOax&_mxXMY0@1Qnh(S}}>T0m>TWnAr%V z1cwvT60lGj5F3(n)FC$<3gW>4C=Rk4;ULC^YD9HTV^Z*uRc?al5Y}R8;&p5RZ42~i z<}HhN+0UJAPBV1Wvj*M(fpZw{-KfM4-0D$0Qj$MNHkU76fJ>d<=8BI$6RDqs-_lZ}#W3yVs z_mf8ddih>elk}s3;X&z3lnv!q3)3OHT@Kqdp8#!88bX22Mmrf`AT6|$8deZ?*ldDe 
z^Vxj3$*P8nCDQ@9k9jn@V0939vsshBN*jT*WK6X%txH<#W^cNrmvh+r9D51ZoYI>3 zwQibmb-IfOezE7d3wGHif5SX}^2V{F%p`t{0Y@`Ny3iLjT0lzx+KTbm0Ga4C5VVQA zL38}yWzfOsh_St}oj}w$GEh#XQN7rm5TP5bm6FltYx@SNDXYpR1)<<;4Q zMG8O|`U=gB{5>djvzy&gz|A7rd4Rt{XO%|8Om+*dxmd&*as&Rvlw!rj<>;Hk7r{*# zxXBj}@-KR={0)`FeAt2c@MZ4XZIdE`4N{mj5W>Rq#_2Et%8OVe^iUE+gI=^|aTVBItmByB zd$H95MgTI##+k9{u+JdFQ+|&2kvH27au$%`{&(_-)_s%5>Af1#r*Z~@^`gGh8M(Ek zrl;0aRhAc%Brh*JGc}YRBI&M~ATk8a$Y-egQ{Yuh80kTwTA0i1a-p5oOfQC+N?*#f zSmu%aTtqRRd=XCUgg;M#iT*!NrnHpynKN3WO^prpb#lOYkSW`YPE8${YoW2rB>;-} zxpGEICa{$8zFrN{6FIJjgJD_;v0qcn7;usyj-1rh<>wdZwDtM<`O|guspVJLhdd6H z5>BZ|46r-hQns6wTWR880OG}9`Q$+!WXac2KAnOrUFW*0YGGA!M}B#Eer{>}e;d7f6cF4)X*~hFf2}sPq8B^93EGw91CM+u{rz|V-EtvkCHXDs= zk}JCbLjiDHpc9re9oj&dgMu_Wb5`VzVbSi6T?~%&XAr=OIX>yq*jg+h(gLh8;>XhM zyQJH{^SM!HaDN$_JHCLZMsLO3ye6?WMxzFo(+tGGL+S zzZn*7R6yYQ@eY8QI>jt~dvWX-N{IsAiUP7r^+upj00SSh_1|Le9Of&CC|!UuUapj} z_%SOrexiIPOo4=IEickA8Yk|C7Pm)SaRfCG%%uXw5=D~6l)`+pm_0tBI7%_ZM`Q1N zI-w-UT#h3`*vm=aZ-O*02Yn=}!UkI8lT(ZADNFn^0v#{`!ow6fN}L9qj)xohD6$-E zV|Oy@Kk1=eMnbX4l~Pat8z5@>$*x=VR?6Lh96EU z(kpON$rR0HwGHLJT~bL(xEwn1}#T);!-ok@L9G7=8=TByoR`B9!Y3U zenYEhv|$4ZxR|AN`O|W3Zz67xM=UMGAxU2x4udAo$@bF-sgI z&lJe-#toCUG-#jXxFv;LginHjB90gnQIkS5ouwmgiBqemwSYkeHzKZp95spbpcrTJ zk}2R>Y=KE#rs5fpab^;1kL@}=WC|qYQ1(}f0NWKP7xSF+a<$$}pED~znrqYL-qNYMQ78c(rg!2`q<&H){A|uq0~Qi83WYv}9N;)|{YEet#L^R=q3Yp2 z6)M6%BP_LCx)v@BYSlM}xcwFrr5XICB{YmDY}z5MD7f zo%sb0gVtA23i%vO|FXE0`!Ruw<$}n#LxEzqqH}(m<)kNP>T}SK&#vOS_n^MQn-yZ$ zIDh5ym+J7X0v$T2%gXW%YP4=gbJhDmy3fq4>neM{s@dVvY6l;w%>Ssop`rYv{K`i_ zO4)uM^t7N{QdU+T1+!yBp&Vs|Ny!Q3D71o;5F1mJuD z{|-|ZRgCr!Fl49s0~%+MbvT`Nl{OVKJiMl>;SUvpk#)7~z|2zaX|YLq``%a(nodZ@ zO?s%i4zFFy9(Crxg~o2T=0Ju-x>nL-!Ve4M<{n(|M}1gsM4BKWLZ@vc>D&ai`yWL#xfV{p3Pck>%E)u)1odB21C6&ZT2XI7>p{y1zNu zADz<4S?rf?=@0ENF}iyQIH65L11eGa+Xd3!DcmFe*cdV7YtgnYSqG149azA`{exmC zabn|(7h~g#M@UoVbI^;S6LTfINq|b$A+QW$0hP`9cqY+Q2MZ*#{*M<)kKa(jj%}50 zc89EXo6TVjy2X@Oh#f=L=ufwco$N~?3s#@aLVI*Z5@bjSKZMmsFlr6RLV#2hvPn_1 z3U8Vp%3_a)ApSYqG@(;w33ycO+EM 
zP)Jn|L79^r0iKGo%@^RR!lCCXT1;xA%T^I?Y&4oQzQWq9ifJZ|(P=LaH#T8wxPq0Z z2eAROnhU)ljY;E`K9rB%^bjuF%5cjyw zgY*1YDCxw`_Tf}bsW;Kg$(GBpdZ^JY>l3a zQO;E4nnGI26e8)nwG9rohY3Y69dv^NL<*&N&K~L)4Os8OEn7pb8R8tTe4nqz%5(?#-N#;Iw1FMlv^hhUI)$;2cSysE%W`oPYac(0m+LU+4vFhf3&z9c4H>i`4#32RITx^Xt)1 zi6a@G)3a2x=_k$}$}d$GpYbtqVtjp3)m`B-WE;w7V@h98;WRj;y?KbJQx=l5NINYfs%9bv>=2U~5nP7AfYmTHFm~6z_$7?^jNn+3hylXs+UQ zI|=*F_h4&XNi$DnUal9t&xiR6)k7^%{8Q%kqP$2*t%YUvc;T%og^_~2qDLNTR5u+a z1}>|kF_w33NE@h{ksEFZ*-))uO?!T&4PhNzev{R_Fv*)TGm<|oANJ5QJ}PR4|7WTv zB^wakIpIG8lPWc}s5zV;p50t#bDfhLGfB!yVih(owu>S! zfb@xba8T4v-2lSg9oTr!f}@(I5;2#|;K+D0V!sq_2eoF5+y?5L6@T)as)ARwE%gvBGtQ1SI*KCh{!DTM(M>!r9L2`<{wOFOw=IKc~i5)75 z%OM?^**2}YzE;kbi$rLTin7hce)wT!Km2{68uB39&77ys&%;U^Kuyz8F%>BTA!EXr zo7xGH8HXTM^15$AUMgYE*pOVd9CU~5G~|)xCm#3fvxCxO_i)SO+c6) zje=fWRifGent@vJh1njV6MmtvAiL+p{y%Q|w80SgFf@JPs$L{*0uF zA6-y0t8@v)8A&-tt`kd~v6pK3r#eS0#+N$&DLw8>Ls08mpea+LT0>#R41;Xr;3f3& zJvQ_9+oi|v_d!<%$pq+f<=6-!C6UZ+UhF;I{_}n+kmiTCKOl2RQ)W=>iDl(I063wZ zBo)?>oIpl;Qnn`>F48t55ZzUPQ^cZ#JQt(g9ssSgg?6->O0Y>4pftGP|33i0Y%|~Om@;(=F(bSI0&(6P&$MdACTr- zSfjkB7JH9<&nn8v_+aMai;pRP zi;pjQO!-?x(1HLL)Q)ThZ5u$_dT_?mg_*(}V1<4m)3>cCTo^0}gECa9MPP#2(YyZ^aQoER%_!tBCv>^Fb}~tJ^n-dUbEHG_f7Ji zkjt{@>bY}g&1!9pMjILgVQ$CVj`{Ou&6zc4c6;m0)|oTfqSK?(Tc$NMH#9dj;i7_%a<)(vUt(L1>Iep=nQkw6=tC$w4xhC(Fq_5 z|N1U4(MZ5-GXoH09Jm+WGPTfj)2P)D$Ar-cX-2gte#CW{Xk{s#Abw1E1K>7}jB(l{ zb*AwS#h?B1&4bMc*}YAY)XYAhk&EyLs-}NQocZwc=!zAMOl*?-6izVq6WygBU5q-9 z6g%SydR@~h%a-xykr6W{dQ1F-nUx8Hcc+P+jm{Gt4pTGMt@FUgJWX)~g!y+0Shwcy(#GaE&{ zPQM=$93%@Sgn5KrL?8EpIVM??NBzkAFPueAqkeldi#c{_!x|D>vBSqx_Gz5#c~zz}u+(MQWq@ zk_&bF|AUTDEiDDl3G^H6iU{eYlx&Cu9ikVb*=YL=ejlO*;JJxltO=Qw+oPd>8Vtr> zaGv$X`Ce~HQC`R!@CK3{?wl+~vY!GOV*^b%Q*r*zd-)e8Za zVBLAPtsgqD7Se$oW|8!K1N*`id-g6E3NiMqYjhcBL;V^p*QSXt!Dp#7q4km;Fywd! 
zKezh*Uc;c&uW+P^HU>s3#$n#k@izXq_&CI7P>r}9a1#Jd(ncBs3Uf_k4{wI{mvn5( z`Gj%EAB0rqn?H$hc*ck59jabP`=)AoJvIwratAmAmF%*l+0x*}gY7NNF>AnYe<{x~ zzvXp{y^a2RwOqF%Be+IPF9XmbBYT==%#h(}S3xMGt(#|qLG#4gX-X=B+&`L0t=}YxENc-ocm_;yEP&-%oOVmJfh_1=adqx(qPxjuU{x9drKecIEohPrktY3A%^wi)b0}>?Iuro2?T{@K+ zvWFxgEM!E}{i!J?6@#JO1Q8j{mLk+$Du~l3@9M)agmLk!TxA)J9m%Psj!{*H=AEs-KkdlES?Bh6V2WE@;GD zSvG?#Lx3g&bQE7FI=YFDMxvwe1KEhDaQsz9PCT;-VhWHCL4cGX_s}tp(~P5eoc0-1 z^t8`J^C?SYj=?;!VyC+ql}YybeEB|X!GZz!N6J;P@T!o<9pUvZRsB{dK^&aj!ac&H z!oLYWQ;kIZciev4jcS+s+&#4O4OMX`s4$r5SwE)ZNaA_oKqV5RK4 z1be#Mp1uncW-=yA%%=vRkPML6LL+Pw(k;p9OQ>a%E!GW!)8`eFnKKz)o#;>=QNJmb zdD2*l!IJ{nwP4U14ZsYAC|8FF*EXBE7aeZxJi+ZwS|ubUt#y+H4k-#Rl#Afl{SC`t zr83~%N-2c@X&ESJp+bglTn3^8y?=?5QBjha<$pv5(trN5Wcc513JSeE{Pj(N$YHzx z@A^Xy3bE@e3X#l2EY(n;{2!L$E2%KG0K)$N?NLF|urTUto__l2pFRDvpFZ={kDoaH z$U_g_|NSGk-g5IzH(Y=9p-cB)c)`%X=5sb}T-~!``Qk-g9if!U>!;t~kZBJ4F zCf1b|RnUBdfY`#-!#W&1A^Jl4VJ%`2BM`X`cWMLKDtCs+4WT&joUW`%tgIkabCMfz zL10&ftja4QBCfb_Rt7sBC?S%n@h}#?$_n{TsEh)nTle6-(7cf90?#AcN<3P?Dyu64 zmE}pgB>rMD){19^^WcPY74AA2TC**;C`GTaBN&G@Nv$!qY}wL+87xz)vfI@9)JX2O zHKCNWw7(GOq{nZv=+yvKNS=P~{QTD1ism$T^cpujxLsM1^xD>mt|c;CbZ6`CU=f6@s0G9NQ~)$Jh?`Iqp^^v_O+pG%(L4QFiQM^>oL` zld0Rl#Ec+HC$RnqvjE=kWYP`(4?w^?p!0;FV5kdZZumSU8W`;$m4Db>@_*9a^M}um z4LQYEH;%sQRBgX-_(JhYXDoN4s?d4;^$F+#5dy%%-Uc6McnV}#VK&C*(mt0A z)+hKe$1T_0(A@{*;2-7a>Re1hCegAvFNRGxD}&*n8ollCFA&aNz4x`l&)YN1Rx{HP zPWyp_SC2sdBObWyVR74=8lC#jZ#ujN6Cy9b*4pvb#|U}<77HUT%zMH##BEssJB2LB zhK-LE<)^0@bt=fHq2Nl6+ndv?HK)5UZjw&7jLw2(Z!*FQ!{R)Ny#Xt9Fso+7UOCNG zc%g7BV5W6Qt0&w%hc+SRUE5CVIkEFXh_yvBUOeRjWUrhg<+xapRc6Acm@y~r zPEN(m-6!_^{QUE^6a-3^%nuKI%@vgy4%uR&ucTo20If*aZ&p!A@A2+J0H=MlrG#nP z23$@!JUB6^Df+k`J8T%#pRU_@0$hLX$qH9zQJd*Tu3?tL8`XRIrgWgTk)auv0hvkb zFsiQ%*N+TSTbkHx79JRw)R~~(4C)MekA2!STZ-NYo>8s?`z!i;k`N+{jtp?5umGej z5Cy}5g&kCd@TLH-2qqPm1mY6RV)(#iWj(?7_J61gK)Xw&$;1g{VZ@k3ionrF`FI>H^i(os`#@Zyan z*Hp)=C0A8$8%?hBO&|#++$E;lpo&Fa$mwKwFFLtF*WqY70x!+o%Vvs$eIYrxCh z!(evngJcIbPI8DM^tI}EOQG>dbuC9RI@x~hIogM2@_0q(emQ^EWe>jy<+a&F4X0a~ 
zh-xHm+rFKm7wy==!gAl@EjAw92we?rbxf)A|GV*+MowsUCrW;Dhf`Cg#A7=7+Uepk zVT{Dfos4l_7p)E=>JE@cX_U~iMEZ2HL?s-02!u2kCFFJl!3O+}oIXOiNVn5K9}f=& zc>&fah4@^%NaW^bdk7gdazfpx6i{n7jvRNYKA{qRdhMpsG|lA>athJX5ZTY0WxUM# z6uS~c>KtbpOp8=HlfmnJlNyHTFV9!h=Tb1%QIAlj6LxpsY{H_Ej*57BJ>vaCwlj>^ z1K`*bdQO&S^lYqiiK4cumn&o*IlA#$AR}}p+F<1FFk=%-JRpa{8hn4e15KMaSEazl z1mzEBp!}!KRwV8`Wwt^)OfJ9d06=zIC8Vl1PrFd;CtGH#lq7vmRZUon0C(tR#GeJ)8{T+ z0O!2v@I!2erJMYd@syw72Sd2gz-yvKSqsT1B7rl5W;Z5Lqn;qUwdB-n(6D7VFb%BD z;L6LoqY@f#yDr$^m ziPY?Cbnd=9qO&glRVLo!2`lv?kLtICR?Zv!Iv1G9aCG1vE ze>|qE7U}{D?nvI(i2#o}1b8Gk1d47+DaplNb&cxrA?3&3;s-Q0{{z{Sy;~9J=A>43 zLa`w8;^W*A-2TYPP~C(6rtYN3Qd9PBz-FJkU(=KahgS}VPChefj~06&`8ar`@GZeqHZEr}8+ZEKfcE0xup5&$4pipEW?}MP;T0ze z?;Hul_XnD)q?5mzusw)9l-LvOiEjybj8K6sg?+9==zJ7r5Ev`6NZf+jHq29Ds|P=F z65Eo@n=aOkU!?EgN8>j^f@Q)D4gs{LAkkScH4aeVV#Ul@hlr%?msfuzd%a?N~H$Iyk8a>41D3I-WOJrNw!=LF+r~oXWt|iq1vn#yR%mLKMCp!of(F7Zo;3q@jEZxWj=m6@s6%KwR z-=!JnyG}lr6cFdfIrj<7)*g(dc-@?YYYH7jVk{9u!izb1Fo7TlSrUL(pMw|_UZOZd zON@zJl%S>o14(9FUqJ0oz8FFg{tPJxXH(yT00b+3d@N^ywmrg{Xb<_%Err1(nNU+J z>P6Tl7@;UIILJOhL=bi8QAFcLvWg?{^=15o8CKgYCY6C1x_eeF=$b#beP(M*b7Ngi zWqC=&g|!%eLBtr$ zU|~ZaYV~kk(3$qbf19FwK;1*!E6iM$aV}QZi@wct;F+o? 
zKLb+)JyV_XESyz!s$~Sv!=F^?y5J-_Em~h&RS_VDDHy175X&?!Oc-aHPGz1ZwGIMV z$39F*6`YpgSGh zAuIJ21J*F!cc|BROK^d+b@&8l$H(G+Wu99p+!HnCVInQ$V^PL8t3fjxnJ^##@?k%X zOg)(ZO&O9oBhd9Y7CQL7PtT= zI$?8ju%(%UVNF=;KqwWp`_KyT3Lg+4ke)KCX(r{P+OV3oN``JMuJUvce@+xI9ek4q zF&#{LMuV`T{9wl=2Ac6iI5_ef_EVSietZ*{lFIhvtt+eR&}qU2vzB*Q%za(VGHg+W7S^!; za!U;^mQJF-xZld2+EsbwEm}<|f8Nsh7R!0dMf0#(Te3VPx!8f1&)*C1HS{W#%5Y8fkCO}A#5ze!_F#;f!8uYI15Z2F3wwDR@S8dLzLAVWd>^7*{~pi8L3O8;?A?LTMdmrv80 z7SCNa->MDP%`M`zv2N%rD4j|QrN5jvslRxOLNmP7U2>;a1oY@I)K^p^cL+r=qS}Cc zrVm~ez~Td@iUs{5n1oG#u{RyOE69UA4S8Iv_k&F*ms}hKRSzg9s5-GzQNXBR5-r0x z!XUM13QmtMIN2=(f*4Kgso&>ZnDcul|CDs>ZMXCs>81FXft!fcDfOx^V-*wOLOyk~ z+ma+d+xv$<48Hf?c>n@|R|x6SU>HnpXkC+lK_EwYk?JYNUP3+dc|Es0U%Tw@Sx;tY5WoaihOcBt*QL)#>-a(IofR6pJdW8e7zb1_c%nSQA2mHicI<+;Jm+#t;yunpim}gg; zeBLxe@%mze#Yj6-S4x_7PSTU;G(q_)%lf z=AXvD5q;!c(0xAW?m#4Rcx@F&!f7rOsUGKcw9lGWUk6#x=}2B)mgPT{6`n*Ztr-*X1nrmxH3-WcR z_S8Ujh6sro`(NpovFH9*df>=ud*H?YCmoJSZmgN@uq{p!ywTJoyHUmj!cdl-;mIw6 z_bWo~8y7YMlMQmAYRT>9!!OP-QI|QyF5V+q_cSrn@Qdej2{%dIOOxUktE6xLhN|RgcXDIh4 zaay@7*ab{R4nQ~?>P^Ql=r?8JB!}Q?I}o} zmMTDgYndQ(!osX3$=GC(6XLZYqGwzyj|R({Byi#;2V4w8hBq4bWWm#)xImX|49Mzc zq5z6aZcrTfF0X?>$@$`9*ihw-T+e-%^J4S471W%u7Tf~t6U;=^lsVaHDq%Kp5ed%% za)ah}P@=S1xRPBh=D@Qwf$i=C)B#XAphxCFU0U5*tSd6GJNK>u{ImiSp~kSDc$-E8 z$X&V@j0@m7$VSXP_=2h70qy{@LYR^y zRA3Tp=vU9GfQN9VH>@$L*r-dImO3-PqPe&r6Hp^KHRF}OfQI-85~Ky^Gon=Z!p3V@qfCJ0o10g$Ss`lHpMoFQ{td?4D7EUv~c#9Z&L72v|*sHMv_;u8n$@Hcq zhJ_wABo${7?5$0DjR}75P!kDyv;^dGMy7+)h!M&*qiP+jNW@;q$6I(XN(wkoA*hUG zWxxBY9)=yhdeXBHagoT+pu<-7OpgP}M1J+ur^&9pwx+3}rlq!}x+)M1bD>|hg$uvp zI#TFI37#7!DuHaEU%+-Xj)Gx@wlo52k_b!#SYZy6txb;o#<;wtWJXO%TCl!0+P!)9 z;5J9wDyzFSvM1f*-LnYZ5E_+n+Vq+Q(;HhC*A=(oPEV%Vutv3Irqkk>)95hkJ*>HD zHum$ii`Ol#9o#6*>nX4PLFW(7vPRO}4b%Bm{F>&e8(7)BVNGXAIVcUx*hb-c_G9r2 z=r}!O69(@%njg>`X^qjU0E9*0y$Q_T9x^=wze{MBiY+6-4nx}M!6<-9NM%UIOR_P# zAT@%FYuN)%DSaL}Qeg`NcaiHqCvc*4b~!uu%b{PJQW46O{L)Ro-a~_wNejmAWA_1T zPA35AntVIlFzX^TqH(P&2P#b`j`j>lMUx`!$SPSt;00XL0+)EGZg}5tEp%~g)98=b 
zdV1%`&td!7$lqBjztc*yBlcG`(ebx=_}g%Gd;>FG3xaL{*Gz#J6vSJI&R3N0O%p^7g1mS341G20g64){fZOb zCHoBlnuFI;6`<-$`-khqN1d@bwZl^PzTtgTh#~2_s_COYO4Ruo zl*2@^$f+&Urx>rvwlBDs7v-DUw~cRkVVWWTBT|IKYn-wEB>ou$LH>dWUmtOLL=}3N zfLXp#E=eZ9gH@9)>BbFlcxgh!+zSLUJmtR0W_g$s%w|E59hDP1K~i)9b}>;tqE_tV zy(lA*D~W#wW53=zymygmIxnb9G*#@#*ta>~R|^~FSMf*iB#?$#l%31q zghcaym|t|p)U4=*^lld!nSFgRm4abvEoo~Lgtl32@E32LKCQV4zT!fykZ>0#Pegwv zg#hD!shD(wQyjL(L)Otwbpm8Kp%HOUUD#U(v*KakW*%F&2P!K8q%cp_}wem5o1H2tlCP`9_K$J)<kS(AHE%{i}KvEXh^0JF|lW)P!cGl0X`mV9RTH}z=}+Q6vvP}n?FY6yfzys+G>O%^0i1a(Qlaq+| zrAx$S)jS}N<^iqj>Zmo9K`JrV!Bn%;m>%mX4gNfXA$;tJ}utmW%&7oe}+geT!>yjT1z+F70LTUC>r)dN{t!La#A9FO|HWV;x&J|<1 z9EaT@>=w-=fBo1Ij0}#I(R4ZBbl>brf0qy`AOL^t$r$(u(P@ zSrMfe22ww6rP~3`aj;3uPK08~0-_WK(PA1)`O`~MGiv6S6wa>9WJ{kdD=I86D~MR- z9~U1#{>!*mzliyIi2Vco$0uY72jmeIv0-=YfLVvZBEWr=ufobCZ6|?6ViksWGcCf= zLTU&4iH6mgAC_-l7R9^_nr6^0bR5PgF(02gvf^+jV2uzv3@S7}6LZ@2kIylUD=p{F zRHdhg;R$(YLWBOI-HtE4hbhDlEe1dND6VdI=4=Z#7;fjlk%ZkfN zOX6Td98E-_oSg_qq>%;oiqM{D2hj{6g(|aCh6symqGNS{y5z_T>Vp&LNOScpbwg36 zHJtYBZAm>lU15Vc1e?B-yZiZc&K5KsXo_P#fwZrb?d$&Cb^o4AQ`GOumbR4`x3M<9 z&|#AkJAw_sgXmxB;06d+>rRKh!?*X)ABXm`M2ohEULKRMpn zuy-b;3~&V0+vqnN%^hE#jq)JYI-mt}T&=Q_!u(JUAf-;l(4w{Ov_zCA^8n~b8)DiG zhXDSTI44rZ9$D3l;oP_q081+yJFTml$Cte>)pT7$K|!HTS6^6A7^3q6oa2hr3b>^S z2&o9E?2#akZ~eFxr9iithgXI8*u}30^Tfd7G{QY=+YE;v&6jMLCb`d zHFYwykvu;HUFhj2O%PY5sH*4ypchYK95(M;Tjs z)m75>1ILfQMDeURMj!QUF$~O*fY8Np70J&tL%3URl}2=CgMEqHhW?EZ#3}=rIfXj` z;yiV1c(OnyfhKXe#=j)Gc?I2tUB^;>n*i3=)pzDB$2+;t7srY7`F)sXPcy|PapVHk z*gGiyF4%MuW=%6$JHs*`A=V89DK4n>f_ek^AqeL50K&zHWzFTy z!5nPbQj+Z^ZAO8s92T8P?xgaPCN&_Gfoe-W&|HS1f(%Kvyr+2UGP`vEMb-{<0H+0%V)BGV|)~1!TT#r)Xh|IaVoV|MW`K+&LRTbQ2sAD&U|9fK9K`Y3+i2$D~H18#vy@ltRubO!to zC8gq+tejtpV+bh=sRJp9cR zAyM95Bq|HtOZCb@qPna=N=Ir(0vd@>gA_&{f5Y)Iq`Q#TAx%fBLb?ZOFVYCoow!fJ z(Sk(uyccN@*S^4!=t1wLchR{DsSSzVLGPlv;e6~%9I0G0kf@B*78OXp7MG6PEiR4y z21mR*_FZwQ#Kfgv;x`F(q~Aua!1Wl?d&>D+I7gBmLc;wcmmuvzUTDMEtGISZTsnFY zj{l*&=leLK%wvDXk?L?I(i)_Pk?<_FGcVsh<#->iAs?cTfIKi4FJ)AZKOy}A3Gd|f 
zCAya)RU^HmTzdk?*N}>lZb71Z4Yb@)z+Xh3~ENlrjITRQeO();+{fn-IZI#U^@9;wb} zN;GynNJJAV!|9H3dd163*nV2O-|y-ks1-ssG-LME!8LlA3U& zve0kSkmx&=o!&|1B^pq@GLWd=)OM8O^`-miJKamq6HVz_0EwQVIiUvaPIDE}gT}ZS zDSkh-6}1b!!-zz`Q9g9NO@3}Hh9l}X_PTPUx>DXW-%;K)A8L{4S)vQ&m5%f@(lbbp zAl;5cPBV6eSEIZ#d*9>(eI78{udktq*Unp=$&{Mry1I6 zjOG+PPxCB|t9V)Xebja+=h(YAg4Sby#SwGn*xzwPn~c4M^dinzAWeh?@il_VO?6ixQTtF|LBAY(8An=Uh~|{2+-@XVUy#?>CpaQMKF;YqIY`ttRClTm zCCZOzO3zci@kGB-ndrS#PSAsYr#jO-Lg{Bn&*M6^7uA{Ci`tLgN%>HHKwqNam*|61 zHxlxaKntlw{vJDuqgq^g@cRUn zy%+h@HR_+dO+Lc;r#QDL<)iWtZKynysEofxdLM~si!|~q^1cJ_Kslr?InmlBU5&ID zX$YwtWy5_Vn^7OoSsKPUXiV#zv{X6v;kXjlF}6lc>Z4;HtB-=kL>Z-SsXKPZ$OR+U zjJ!AU2|n@f%leUH_{Yyz zc6{W<*t@akB=^W4Vm}+XX7n6svm{E|*b7plv|!}t(&EttNYU64{QH=H9vPW0AIIK_ z{fhHv0&qm+f2)N@SOr_kZepK^P2zs>bJcv+i>lAno78uyKiACF9MU|gc~3iEyHk5y z`=QRP>(@P^`&d6qf5@OW6dHOAR~Y_ktTApi-fn!_wAwUko@>6}l5E*wdEeS#Jpu7| ziS4lM4SSCLZ2M08>p(Ev#4VC70KAyKB^)?Cy3y?|#pHGN~-7FX>RyBS{}8 zuShEVS zHl1irXyA`ROg3XXx=GCcX#*R?iahI1v3`(EVyC8g9}b9cz>aLVRYe+h4(J}Vv&DQ&7$r_eT%MG^!nnI z#Vw2b7QeD2ZAo-V-;%>i{<^ej>0L`dU)Hj0VA)N}K3?uz-n9JL<$pTs<`rctHm!Jl zW%tTESH63;{p^;rZ(6l})$tyor=sWVp1XQJ>kaj8UOj8|t7{@_US7LnoqJvDx~JD0 z*MGKQ#fGDOg?+m>MmBEVc>Bf=HVtfgV$&Drq@7cD&Wdx6pYz#f^X6mc*7b+_KiFd2 zGJ8wkmcv_~+N$2VdF!j&jN1wat{HfG&^}l*czE!k!RH4*7%~p!4s{Oo4;>nMaOlML z$oA#ihqm9m{mJdG?#S2?-LZGai5>5rr#^4Sd0WrB^StN3RrRgq-@4&je>&fM{{HiS zy3@XM>CT&Wp1h#$g1r|!ap8&!k6-xSuy=U<@a@A-?-Fs^y z7yB>nzW9oZPwX-7*|X=7OR_KNyX5%Zmc7UKezwoMuWsMkef#%4xbLO?`u)-U>-Qhu z|K6q6ORFwjd+F_$zHuP;z@Y=uWdoPJeX!=>k<0Th@4x)%D`s3le{WyeaOKu3ufOt( zL!m=U4;{J6ebxG_?!W4bt23^?`P;XC`;}`duGxRhiEF*r?!NZUYu`A$_wakyd9SOw zZs5A3*H>LXaQ*S`sJ}Dtoj1Po@eTSLvTs;%!womQcEiaVLpRR7ap=a^ZW3?Gy=neU zSAVzZ=EyB)-}2(ECAU6!TlQ^N-}e6P&f90*e%Bqd?s)L7V|TrN*B5s?@9w_))+6GP zz9Y~5Oa8y?|Gx1372kjA9{)Xi?|J)P?|s634fh?sZ}k3>`#0bJ$^-rf`W|@o!Q=;@ z`@zr;-Z*;nhu?b0`_Pex%N{=R@aK=reN_MGUB{xwb{>26G5cdnAG`3e50BRzKl1p% z<4->R`4hQMoc+X|PrU!H4Nq1)dE!UQfApuHn18bWsfMTC{kMCcHa>mDGZoJ?V?-WK0d0$N9bk_u=L~$uH04|bkFioYR|);%pmMGTPx6j(t`TOal9Y3;P@+0ZIoAoR 
zRhKL0dcmpsvvO__W~;`ObCXb}ZdcAN!V2{T%DGiosQ%48{<4yilA^MbvJ(I7bwe!! zy&L-n{ezK#$d-t|b={72o40ITx7L5&#-Tp{+JTKb*7d-B?6-$~V^2`|D@SUEE$& zRa%j_%TKrY&zPZqXlQ-!_Mt6(8~ca+xjRZDHIeeX#KTiwk+?2rx^d9o>mM5EUAykw z-hp%cTh^aa`@{pCeVCTEZuQUKKCm5??l1Ju+0s8a)Z0I_vESdaee=5hbGGd08yea= zSXW$p-g)OmHmw_4&FL6fv*p~m8`rGs9|Q@v_pe>8^AgJ?ne&&{MU9Ihxikl z@!M?t(vDo(glWj{bnlyoocoblkTxqh3?VmqdJUWr*5Mt4IH$Y(7+w*j9`(qr1;X=F z^NiX8eoQC(_~$|3apXKQil{S!;Nk+)IwGLO#!S6ee57A{S zN?8X7rDF6b`WF#4;hrI3H7~Q=A0y}kTTu47$Y~AFVGy6x3%4UTs@DL{sW(y|oQ6Dm zai9Ec@-tMrsclupdsDnLlgrXOAuk#Y1Bjegtn^B%36*0ISLUKOOh^BoFD$?(jV|h; z_)}!3V>jdCY5uTIK`PB)?T1)5U3i#Qow^<1ry7_u)nOns2#xR}p`kWIn1w>kgU{t6RCx*Fjx86?5>{X(v%z%A z4cq@@SZ1cc#ykxw9xt>R2&V(fuV$!%Ghlw=$6A>Uy{QHgV7+iQVDg*Lgl0sHStvl7 z2QnWQJ`t`J{(@D|AVi^=Pzw(U&%mWq3nN%PtZa?U#1L2v8U`D)1Np=WwEK)4r^(~pIp2*-rSghznb{8Qm)!f%Dw(Tu+n-VojdX|EK1kB;#z z=4C!a65K4jCcMK0;Vt29m||t3VIC0vUHCaxih#V-h0BU!YjhB(K-HA_(*sk9qJr(tbSlQZ$;w{qC*X#Yn~_U6wVhe5Qc>dg+B@x z3A^A_d9m;(;R)eCh4svj)%|1P+rk~fAB1;V7RzP(0sWpkL3$vtbi4= zBDk0qvl3Ry%HTXv!75o5BHh;rUkG2aT2=@6kOtNWJ!UhDvT3YEILW57R@TO5AW+gQ z)(*4$IS5-hkIhGr%}&Ex{gd8C%ZIVk_86b~anZdRQ-8&DOBB zY#m$AHn2Xnk!@n;Sur9fU>g73@lONVr@$D11-2lU>EG7XHk>&8}ep`(lULb?kcf z9d?6opKw6VZuyKtHCp>UUQg>VF@z&{f9!uIrf;TGXm;ZxzS!r$19*q?rv z-OO%bx3b%WCxyQYp9!CXZ`&h0&2ESP+MVos>@IdUJHq~jeV^UK?q&C}``H8RLG}Z7 zl>LxB#2#jkut(W3_82?P9%oOme`QayAF&^^pRlLczp{a#~_8R*wd!7A`y}{mOZ?WIAx7j=FUG@j|9{VGE zpZzEMfc+QykbT7d#Qw}aW`ALy2v@OBVN?G%_8I#-`<(rQeZjtDC)o%a1xl>M#t@-^ z!8#Jg_6RHruc-g0vnN|_BT24N1i%Bl!Apcj(@=|CEz!gPV3VR|I2x*(Af=I3q9}3& zXgmWn0f@v1BqV@bX|&huF}q`K*bk1_i1iWSCx`u5pnS-F*ciY0$uEBLeu!Tj4mz{4 z`T!((V=aa{vof>ts;aDNTn%4x=!Nh2M_51nf){-6j1999Hp-%GjE%!@r%b{#E6%ZL z7GvkxJM02`7rxgNXBU~K{H5~O%5RjvQ~pBvbLDT8KU4ln`CE9s?Gl?|v+!QoWjKxg zntdC-0hwSr{Ps?gy~k24%`))Yu?Aa&CvaY2%j_zALjD7I>eGkpn(~eEuk0hX%C0k$ zS!|7E**e=`H{e-$w#W8am3_`0vl=^Khpf(?u&1oSn(T-jv+uEI>^b{_y1 z&kBAI$AvC&@u`Tm_aL>MLX+eMnrTxxEG+x%k4RTNkA}RiLieb2$=7Luv~DT2G zpRy00z_A=o@dXzXs{42CMgjhd#yOCw*2{%9KpwnK3TBTR^?{xJaePpNGx@-= 
zztz1{->V;19(NxT46l_Mb^ECM`hg9|VYf{%e0{e8cN}3`fbQ$ElT9#Um8y->@xh~7 z<$0v!;eA%Qeq@*6gnZOja#1j0gX(ZufbOic1gCwp7|xQViU56Cw_9JuMZ2@oenmQ+ zC5;sUA{(9ukv$LZ+i(^1?YU^=h9?o(_wew|qiU^O30DQ^zA3$|N_bOxP?a9su}jBC zmGB{fJLJzHMR-^CsmloOQiOE@2JZTYW1#MDg||pugTNMPZ4kIm#HIj|`<~~KriX{O zNqC2XIi_HCq(jHjp&fGQSb)$@qk6a(I>u|{fu}0+%!lWF_!k}?en?Ke5TNg&o8`V2 zE*gHY*RWwKJtOdddOQ=L_d&T@X;hlkX74ix>l1RxlHIHfI5g3UkoN-b6Ws+m8sQ>mSp?7Y%daAAbNV?3biwta7{!g*%rY?$zEIv-==Y{2@u?dVi7#T z%RXGfw?*H`q5Y)ZJZjXT(u@9?^fE!m%ur|H#K)N+X|pc=$+IL)lE~ zir7;C8q{8v?d7y=3*DL2vd|6C=zk0qvvOF2dRHxlt2761(j0{1zt`-l9PV{x5BG+_ z2P}VX37qh4NxnVhqFXh+`srb!M9PZXS=|Sp)Q+14yig&PTZeBq8Ko}%ZAawDNYfr_ zq_eV3d#jPATpAhK&WJpuj5H0gk)|Ox(lnMvM$#9g{DRY#dJWm%ps_Iw$u}H(>(ahs zhsM^(NI%lDUz(w6o|yr=RpIZg+sMecGSZ)nlOM@X zi}{fEA{{^WJe)|NMn6V)JG912)(~$X)OAMZ3 zN%Dqc$B}dTOMQm)ODJg++aWL%39 zrlens5~iddixQ?}eikK6N&gomOv(5b9VqjYl5kPRu_*JBlI>K6RX=ApKkmA}Y0bcE zXr;%Mqgv(Bk+U!hlGdp1?H`F%6ctMrB;mZ`(8fA?3c+zd5l>aL2$5QGOh`d20%Y6M zDG#XL$!t)Z%v?ouJnXC?BSOeuRR$c2ur=l!=lo0loI`xgi6|ZNo*q}4NAS9;P!VgV zwH+`gh}U^EobtKZCO_sd{O*Xt=qL2Ns5I(H*nmPdUd7B_Fs)v6>HtpkWu#=%R0}d0 zszm^4E@Z5$@483z*ANjB>~wf87gs@Oi^C(2lAPbSAtJfMGAWBN-2!-{H|m^@1w$Bd zkJg?6I(>U>8cx5x>BHlLU3`^$@37#ZLf?$n_OX1ZkV^eNsm=~#^8DR438L=S;_WX{ zN5Yp#kLIOf%#|a}lyR5nAc8#AtQ=G+#V$tjv{}k*yEjzcs_-XG%#S6@7?GiPzKlwW zZ%>>SubVeZ^+u)Op=6ZI`AiEk8I`-MI}@T};kg$fd0u+uECL4#=9rKr6Rs8_dK{m@ z&e-4ar0DjmP5YEjyp1tUj1JF)ooR>ICeqGo6A`SKLvo-mxzRV!&I`Y7IN#G)HVYzzXU-fq)(Oq;+`_IjQs zkt_|#*oBUFJA?M(_j%Ysoc7xA`h=4&A%D?p`&ezMob-wKU6~TDlFl}m46=yeYoGQZsD>m&;L z4FfYND}n^*isONkaNmm?6TY01G9f%66?yv zcuk>r$eU%NO@|bB<_X%MJ3+}8Y_^lm`L+i4m?DH@9UOT|61?;NQ$y$G;aB;@ZoqvJ z9IiySDWA%=lJDQ)I>Nl<>*KKs?TW{JLf%d~;iJ-4EWGdwi^VIb@;tFL=G}UE%>JbB zU0`_EJ#cmg?}+@N{|=M``0kIwX5sg7KI!_;vk3bge9A9%_FvS3_f^Cnn?3vQvxkE3 zV#1Wu|99;g-~p$e4ivN$D=>BTAE+Vl&e^{s6#aqD{wMGX+wd)jmk7C{t6bU1W%H`a zFBc!FCp$ULX6%3Nf%k7pC2cnrk8>r@6-!$?g}|+%InM(+SBsD4In%Yc7N6%`x>|nO zH8!r87LQtLv1qo~m}RzRx-8DD?dK}**8s8XG7oM(KZRGgK@E>r&c~5@IyfFM&#QoH 
z=4mt#1)j#0&0Hma3eO1?27@~9O7L+jhxYh{Wl4dUT2}ctn>;xC@bq$Ez{-}gJeysP^$4+u8-`oV!YCfj&WyF^Ad9_8-;1PTSIWAi&Pa!{rlHY9RRLI7Ot>Pnv zia=43*N4~=VoSwX5x#yJgKY7BtHhP999P8ihvLwN?`2&;sSCI4SHnsPC0>P;-F&`m z=XoHJ&&vq%Y8m3u%=|ou?}27j9-OfurfzF9$GbI?_h=@J0CXwN^RP%aWI`>sx_3<# znHX-2+{Y^q6|*HCnu`O|Qcu(qu+>V1W*|fNa>dP*Mzry z`f2Z=Vr9*_xI1wodizA`CoO>lxL`r(MKyb(**K^o*-8utgsU-d((w|8R?>du^T%>ktAgh#?Ce%M25N z55WlWVQg(dYgmle2q+pBNl_sOC2*XNSVi@usPYj=+dLoDZ*AvV!SY)E5+A5&&*ynm zzjZHn>pl@LVEkoFXM+0h*WQ!};PT{>eF(1Zt zc!!Qim8sA8HXW?&3>0lPKUlir;_tR8Ukuou(Ce;lr36R>GCnp=)29%&2YbdRCVP9bbin-rY1=r4{N3)t@0w)l+}B=mq9h8 z#$EXzN8DhS6?cuCTHK3EGu{RnaOc0tRI0ba~Qedz3C zgKJR#wRL+rrh$cZh!cRY0{Y%y1RTMV9EKtSgAsxW2RlfIC!o>B-3p!Pf~%~go-LRx zD^4y{@E|lRbD^z3nm%+{ybp?rs;+ArXcR~Bn!^*pP^P)EolB}Ku%V;(jEJDr9SRV-loi$i#NmzLl(7N90gS`f4znJ-d*p%LQ(**nf%wo(p*0B|B zITrWLmv~$5V@zH1_Wgb5J9<* zG8@3!Mj1qQPzI6jq6{Jr^p{W#Ex>Ys!QKv7UVjN6WkT#BU_|=_-^)bWgu6Rh0`pw@?tVUM{SNpyRui{T& zg4l@pTV>V#LmGZGU0G{g3VeMJZWzAKVY|H6x{U2tJqi(&wS27gEhPO*&zJD2_SLUT w+d>i;_;0oUVen4y{T@9u4L>zJ_bPDq6%YOi?B#1OL*=lthM&%dm4KK31tPP^EdT%j literal 0 HcmV?d00001 diff --git a/python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Bold.ttf b/python3.9libs/searcher/fonts/JetBrainsMono-1.0.0/JetBrainsMono-Bold.ttf new file mode 100644 index 0000000000000000000000000000000000000000..fd1ab3ccb9a36ca6aff5abe4453c5a6d1203c523 GIT binary patch literal 141824 zcmeGFd3;?}xd)7|wfC8aBq#GcoRfKKl9Q>KdfKLK+NNzfCv7^UEp34^bf6%%HI&L& z5D^ifA|i4D5i1r%;36U-B4R~Etcbt`^m37lh+LHR^!+|-?UlXHkksGj_t*RRyoH>d zz1OqWGp=Vn&$HIfVVp6Rjt?6v?O#5B9_~2PYCgf$HLripT$akt!J~TIXU-d1vYh2H zAMPW#Pn)-V<*YB}{bCF5hZwu)g(b_UwY0wR;#U~ExD@RhH=j3l;n>l@mvKMNm~;E) zy?aWhpPQP_wER)DKfU$BZRZU=d9sIT1%UUT+BUZPLOf4H`wCoL+je|r>!GbrT*=sh zzc9X|efwEsTT-{JX<__-0MFXD;{orp{{#2KxG&#+-kwW-Tz&F2-0x$|{n3sKHjhQi z@3@)qe_&j@X|*3EiYFY9A7ScJ`Fv)F9b&*reXY#tk6^VtHnkPWg$ zY%yEHhS*ZJj4fv?*h;pFt!Be)gsov~*(h7b*0T+4BO7Cz*k-ncoyE4YZIIj9u&Hy| z4t5?ppIyK%WINd|wwvu?7qPwUVs;7p47-$lmhEHv*#Y)BHqI_%2ifK95c@p4f?dh3 
zVpsova+y>v+_0Ey*eQNe^Jw{6z1F7pl9QKB}LuAJA{q@6aF6pVD8{-_$>}*=(7%a$Bpd-?qZG#dfLf8r!Y5du$Kep0&MV zd)M|ayT_hyueNvE2kfKv9rk_p!}cTg`|OX{pS8bif5-l@!{NwwR69By1CC+GR>xk) z6^>gRcRL<-JneYN@uuSgN7R|(EOypAyPN~gVdr+|CC+P{w>s~0KI(kVdBXV@=O-?Y zE8kV^>U0gbhF#lTdtHZIH@WU|J?whM^|I?N*N1NAPH`8y>)l=M0r#+bt9y_8ko!jW z9qyy!5eEWQde20BE`HuMR^4;${=6l@twC{P}OTJfqZ~5NyedIgoxA}ekOn;%j++Xi+ z_4oMu{e%7${!#xH{|^5i|33dA|6%`4{v-ao{P+8h`5*T`?SJ0?lK)lzTmJXpC*aY& zpZM~Dca6BS`_2KjCwy=DJ>uTsFZD0O7!UeiPf3waQfgC1Q?5^WJmuZgTscB&YwD`h zOHyw(?(R-~Huc>!m%K~MP3uY^^S!L>te&h*;@**UN!FcN&u4v{U6?&f!e_6_z9jqB>_;Sg_Osa^<`m}4 z%GoYka}MO(k@Hl}>#{ZHgWUAo*4$OOd*prY!QA_EU(P+57nJvToq3z`4(C0X_lmf8 zxcF5Ikbs z-xqv7_!oe}i7(dMWguD-PTNc9uduUAK9?;1}{Yt8zaeKmJVh?+-g-mLZ2w%3l<9+VKZH`hK{ z`)*xYUAw%m8?4(~cSqebb??dhdab^^es2Bt`YXh}t^TI^hwERd|5&y*I2xK7)-;Sa z+-=-H-tbOiT4Q_TsJM4F?rOZL@!`f-8viw|a9WpynbtpT>$Iz;-8W5Is(sqaO{}T5 zX{c$hY;QW;^l;Pb&5mX%Cwud(<}J-vG~d(wjA*wtztsG3OKyv#*VZz(WqZptE%(U2 zEswOEXgL`!4);jd@PhD;@b%&QWo!8H@H?$(t?jLnI(zG`)|*-%ZhfWI;HJ&i*4Q@O zcA)Jp3E%c;+gt4^?XB(77dYB4Y`?Gl)ecWbXNS~=z2jiVv5wa|ZJo8UM`xsSSLcz= zr#k;)+()N3O7B+|%=7&nLabvbDFhcUA8ty*JC&-n)CB z?R~e;*C%PS^>y~G>D$+LtAy>lukZQ3_h9*UTNCDyMF#do)r*!b+PCP=MH0Jx(QAuc zi(3{CFP4(FExu{-!;4>8EbY&>#IdA$$$}-@CFUiUEV*;Z^GiM)%9Zy+jYGpj2Z!z% zdO^}U^w!dprJYMRExl^#0}^8CQ%m1k=2%v}Y_7atwqn_VW%n$5VcAFG-nQJcyngxO zh3jl8u+6L1%q{Y+ZAB&4aRa z&9iIXUF%zGSo+$owWDhftUV&z*51GNg|#1yrpUX|;?cguIa|L)VP)#*J|V1Zd|*JYoBqQWn71h z>w4o#nI5vRu}h8X0pohmxL#pg4SvV&Htvra*JH-@3FG>#ag`Y*A2awIGx!|)$Y}q> zxElO!0u4mRCWF6Cg~olQajiA35#zepxGpoUqsDcsalO#E8hmXs_}XOfwaMUX(=`U1 z!QUoBpPTM8o8Iz+MH|L8~ki8HSP_5HXHnGUSK>o z_}aY1xZh=5$BnCDmz%FQ?vEJPyN#hY+PH7t3mG;L+)D)xo_EHv|nRf?=Y^09Jf4a+`nL4UpKDr%j;RpxTYA_PUAWk zRRWHB07o5wBmXBjj_lm}sf(wmu>fW=2#$IJM;$@pxt98CnlXy(FWbo)Y0jHC2)}I8 zPg+~;Go38;y7 zMVlH+2UI00CCMZi%Qrz}qDDjG{(C)ip@AU!RU3SL!K65WJE<>@N)EvjZ<7c0Byfk1ZS;Wr1cnCFv2r@>?=%;24&CZEdL*CSy zIQu1?irQrORh!hDIfEtsFJeqJ`&T637pewkvHV-{tNy0< zvEr9~(mo}BQvO!_s=sU}{eMaH zn#%s>2?+I>48Lk49nMRTzrbM3HEJcQO#C;+jjr0P-amGHg+-1N>;02M#(V!S6V4d_ 
zD5=iGbGb^8WNtR0z?BuN)%y#miFO5*IE&N>1TiRN6mMsP1`Y+0sAoBbgp*@f^=ze= zT4UvGrPJDXGC7-ACYQ5LbjmFSvuwun~jp{UW5%@!*K z>UU)gv9m?B(rD^;S_@-irCUvk47w!ev0A*%gtCkkuSEeh(Jt|t5}3>uP1+{6MFpeA z{P!S9L-G7ltRL$glJWaB6KaAVDP^nG3Mj*B6@|%s{2Uq$yJPY*nboQ`1)(%gd@^RG zW=_J~zdder)z>QT*zpyXWKv8H885}J6V4bPOLVEok0NoR`wkN-j(e*V1=K{lqAhXm z*U)I#o21XhN$0+5QxHlL#Ahrus=qn+B^7FfWLmW1ubBeBf=i}n)kYL1#xma^-i%qO zFqjnHNv#I|)T+3%>e)KJ`aQXKP_40J#_Rc;CPfyy;`Mx&31y*=q;A!-fSPERc>i54 z%*Za*$62{BBgA+vEc6*%NZH5wI17|PrNTNj7uK<2^Fc)s@w$j)ggtWkseDO%J}7&{ z!m1vLIgf?nn7P;w>qyLbzD2m>;MCZOIgbT8DebX2kAQ|n(|EDeC)co5GV*t#nJgw9 zS8K*^g*2rXui?9s@J5(pHEe-0XjNFP8WvDSyGdK3KZ>O+*=$$6m6VCU*l58Dp{#9k z2$f$=4v}i{3xtia|1c#3>r*mT`JrgF$YN?PY4pi>NF_WtZDQ&>5x?qh`ZOzk**89J zwn};a6!_;0`~;!=Vj_OkKRJHccPjjHrpH4_t5kFn{&)x((V7sG$?)HWutxZj%X^4> zi*KW+CYSe8>x4y{60)uw3U@4|9B1+wsd^>GSyV%f^)^;NsUn1Ln@|?|2&;8Q3aE*8 zN#U8fkWx2oE*TdJhQ=hwc)wzy&ycRP(pbM@fiiL&g>^D66y7s)A+1Y|m)tuln8~=f z-<0wcTv(utbxL8iav`85Jch!kJniJ`-wL8(qVgyGNMb#7VGLAkkHq!QLUGKxMt(AJ z{rgUW6ckOQg>3v3qL-?NTA(MWmh?#4W7j_cZAe9ps-V?OoNTr!c;%au*{On2W5(O* z14(!zt}^js);zvzLQU|bG+|b}1ymd_G*-Mgcd zZ;nzF{~7RZ%PV9t1*{o^kkM`rO{kNllff^c8E*$@z`v;`=6jQ*dE{nxHa~3jt-c zo3tg`LM&y;>`7X(IRA2YcUx|3Q~_AFX0d?Zq+G^IvQ#Q*;rgsLOP z@?af-}s z2PQ=py5jxkBPNuEd%|j6AqCV#`{XjRQZ|()q_ocrk?23I^s4?+mXsM->7>>~Em|p5 zv2!w8nJo6U;#d8X<4+!YTk)&Cjb!*YlU~HWpz=+m6N5uTpQ?Xy{Hir}bh)-k`Ah#j z)f%WIJoy-BhLAC`q*3zlL*mWgLG?Fz5Us`YAaBo(~vL7x(G z)g)ZR!pL5UQH_N@g9{pODqX~4wQ}*Ff~r`&G*RNl;?3EvaP{4@f%I2+8ovHuWj3BpH4aLRlPPV(bZgD&MkbLO%-skEqqC=25Ha zYt^$FS@Ng2OlGYT(mLlz9`Wn2z$qnSkxRUupR~}eMof(R1e8I)36+cs)oSuPndgv@ zvAiU5@ne&|DY&peO;DN0#Zxqv;aL>kWVT@Cf#zmJO#j{2WwVP0h7BA&H6@+XvArdWKwJBPZtO$~^pA#h(e(0%b&!Tlo3gzhX zQ_Y9i8K>F^f1LGURMRorw!c=VX4{suQ&mC30b*&)|Z_i(h>#P!@kjGr(#sRxV81RPEg+ zYVVDB&z$Q@%|whe4@sGMbb}@MZCr>P!<`)Yg|Aj@UF%> z6E~_No7~$aazoY1_*rP7)6lG}X=Z{;)7x2~3~JS0xz&CXxv`FQCT`3*Mp8PnZfKsJ znVV#@F_9bP1E%IijwQXObp^6YI+d(L_&eev)`!Q!nO>G489PorEwf8XK}FW0)nZAB zYlwAprJZE>W36U#2sy7Lw}})g4CWeQrBLcUKAN#os3e{YzqACxKbZ_oTTwnfkwS$b 
zIfYV}Q&E_Fzst%)@|veSkFVe+=RrbHWi^(EUt8=z(O~lMBDGF<4C))tgVHa-DJ{iH zIUXVwzrbMe1+jMc8-ZVqVA5`#52{sCV5K8|RwsvypVcoB&KQp??JE<{O&|4J6KY~N zORa(C?yZ2DXjil)N^!B^Q++1WqH3cttTB_cL@PCk@z%?6qfd=r#pSVDl=xMYGC76T zNJ83?q)^ac=viSf?aWGH^2p!HznW{wD4cArTJfv?$?+$jt5*CqNqvQcs1dRzh*&$f z;#d8XyTSMrOMzl1R5PZ%bLNc8uD z2E#%W22(;-3Z;f*o7EFoX;i-}PGfTh)hcDBBo^-pULmQ(cmgTcSWobK6AE%yV-fe( z^<6+sv@57YIV<}i2qlGBDM|=a&arl8rAGBPr6{RTBP8?ZR{Y8Rxr9)@G~Ujw_*MVp z_@!5kxARp(g6cDwovSvIklc5W<4Zqd#c%F%sJP}t+~`VnRx47HrSav?xhcn3E0T~3 zMrkbGiUiIWE0XpbYeg2QIILDH`h%c97O$xRD;KIpL{v##nH8Z}E+nLaA$Wp}=R#nP z;X>AsV!5zD#bLE_@v7h=7Vnw4kX%pAg@T!ki$4|HD@aYa4ibt5Q=;Yvc$JLA@OkQ3odDiZT$3v^74TG1SvFDFXF zI(DM3wsK}d%2g@WKCO^uz7sElKan;PWT3R6PPkg244xD>R?P~i1fI|2D@@uZ=R(0u zrped;EiT?LpzXCe=z3WW$WTW!eu`5v4=5hKp81*2(c(z3ufz z|JWExLR>Fw>&y`3BPOR%^*5s&Nuk0)edD7Xg_*3H`X@rD-U@;w6U#&V7*j(e^6)lB zG(4uDf%=;~yhE+>uW}d^Ci?JLITO6{rtz7#l^bOpi8D^%jG1wg>R4N~KpFHYtX5l< z^vB{&CTC@XM7>(&W92L%6tD4?@Mn^f;pJ3+Q;MS15(y~@TO+kt3aoU*OEH!b1sOl% z|6)>Pp)20|{&fG^-Q^)CBzsV=^wRtF$RAX;ZO&OF}9bHD){)@0#>Y!NvcW zP*ZRrpo}#};WhPW)rD#$s-(2dnpi9s5>mksJVC~DAuz{Sw5;OATC@c!4y#oc0?Oc0 z;XN}KQrc5- z%`s-BM)f!6rKCcQkZgq-AfAm(O#a1+cpsh!A;*{a|4RK0X{or-iB^6Elri@dg~{fg)Pej|)F)dL%`phm@`ZD$Y-aU)iJSVG{XQ z{gdNQo+nuOA4|H%NQg1gIwOe5<*)iD$1nTF^FJ!am7hv}vHYtx!k;Mr52?Rlt*XC? 
zU$o-y`p5UR&LXQ{+ae%MJ4)uQ6by|?knwAbg`z2Dkp&9hGoZ#Iomyv+q+e2KLM6_k zSRRtiBI_7i614btqQIb*TBQ$BzDTqhK2!FM*P^7&q>%c@YEiW*PGhaeir*eDC6(ND6Cd3O8TV+6y9WOj#Z25uj<4~ z(K?3w`!?y|RsG$|e-PgWwW_~tr)W^L8d{Wn; z(W2^aN>Q{LT9kd`wW!8bGD*~;qFzD7YSD^c$-%_`iO`A~;mlf;`Gq-7vbm^W6g}}; z6gbbQMGKUXvnZ@qElT>O78TwzX;I}a$+RfPklB${i~qD}QS~>aC|V6I%D(YhRO2d{ zBx+GnuOMQzXvMGOVB-Im(4rb48UITKg#27?tO&9Et2V+<5V80L2E!Mr{_*(L2xr2- zatiz_1)T&jIeyhYIeytUUjA18om1d<3j72yIeyhYIeyu9D*TJ5z`t1FCy2@MtNzLH z%f3_LS9{3G^uJ2rKQqMC5>ow>Q%J3eF^^S4>Rqv96fP1JTHhC=xx|>&LRJ6d_+{Uz zBs6~t4b2z$31V^$ss73F%f3_LSCMZr{wK>^to*C~$?;pGZfneC)qu1E)6!%8yo6LR zsA`Ki%fTPV z6ZyA59r)Yp@^>*7|MJ9G@;lt9r+ojEB1i{+RY1>?m?@E8t|OvxP+G&FTl_j;2B};PZ*b;Nlrn7=xILLX?a2%ejspDRmQ<(ekgv| znvLg@7fowq7i(@jBVOi%N5keY+Fm>*+9vEon>F!-MwO8+wUV{hl2q{TLcrJ!d9A?w zmwa7BO=!_E+9Kf<9Q-)$>1|=j(Ps?$sXbPj=98Lu82%OGNh-z#P2w-p%ku#a{;(Kd z&I4J+mcOg#1{QofAn{4O6IW&{>AC@A*W)_{m3&AV1ob<7(pzAPQj#i-vA}pzVbCR? z(0un`Brl*yv-2?ngw*&cJk{|-vLUGv6^n)5ft`Pt6D|MLc51Lr^fFsa z{tb)cHyc!tB@xw6;8X4yIRq{AyW${-77h0XWm<_=tW~4`XYt&QnJFpTWWbLbBM*wx zf(|aNidxAQG_70f(s~38_3Z~&s{pCyZm($3g%2_;1bzC9r!;$|l-10XG|n_|N{b|! z_8XEj=O?^?HphT7Y(nc0T&Eh8OId4>wX*AZ7I;PBe2pwd&O$ZsHEjVpuf}sV@2|)2 z4*r}N|9XSFg)ubIQ=&=Jh$E7(@IL}0`5)CvnlxK!9h9wPgYw@Hp4dcxX$4Zw%S0os zM0QAXKuC_XPpvDo<+7`2O%?4E7No5dZL7qn($|=@N@^COIxOWe0@xjZl=7gbH2xax zTMo~$VBd$Zks4?O~@sv1SPw=7#Nq7#pQSckJ z4dOXbK+mQ38jFLUD_Y1hHfx(E;N?>q|Ew5zTJO|)vQ^tM0WY8CGmmx&;P+__KcaPs|7Uw$i0osp(rvC10N<41zBwTIeZh|6=h>!;{U3k8IkA_Bixq z)2`8malcSr`FhsC=iq+g-#F8pxS^Oz{97=qv=o0KnSHA1zdxPEnErdzj2OeWo$RDlR=v#;<7zOtZ&4V_&!T$;W&Js_wgy+c~=}o$P7$6ZTW~411RS4DV|{hrj&)Jo^RvCHob7fxXCn&3?mP;#c$8{35=W?`GqC zJ3pTf^VNJOzCGcq{8he#y{hG~4O)v9<`?s`wOlPzOVhHn0KbHPMoZ`Cu|M)Pe1u=9 zW%FHHo|b{{qCLpBfvRt^yV*nh9DXjo*LJP=URwHo+I3n!zAN}~ehA;uitnt|I<@Ki z8hr2VHSAh`75f&upWVg2!|r3>Wk=cf*aPhQ^liiJLH0xTDE?;CrR+9*r_Ock3;2GQ zFS6U&P3-IJ2-8S!G#cPzySOsloJMRxm$U z9IOit1UClHs(7yQ=}$F$TQU73Or`7_>;YiP1*T_!=@o&A+fr;xZR>3}6pxo?l;+|) zC`$#V=?13pB$xuglsAECi@?NziLDTd7g`e{oi!P6@6c6J*-kA9YX5$-? 
zH~ep4j{NC2Z@At#`}HNSzxVq4uV=sRe;xDX^-?_l=bzsB)35)ukFi&W{_srVC)m)U=}|pto|X)=Jo7VSUuVPW9&cqI(|Op z_Kz^Te~kJ46z1NYnB7l{`TaA@aLjLh0cQ9wFvow1`TZ+=$MhdD$6usb&VI}OfSLC? zdxO2n{tIXR-e&KxKeNBE|7P#9|H1iz#rt_=HKUE=l{v?;J5Iv@_YFa{xyC-{}F$Ze}jLQ|CoP| zALWnoAM!`|SzG-w_T^3tu?SFXIRKD!nFO|M1(z&F@P=S%Pv^iI1C@Or@R$f*kl$^sQ- zfvU2=8h%&wPF|lGeMvihs;Nb5?mqP^i~{en5MIO&99~uxp>IDE13T;(xoeRPg^g}) zF`jyvH$eZK?t;3EKp6jp#D72k+5Y`MJD?qpzRpWeHSyl)Peho2aR)GNiq?eP%%O!x zbBBf_xy)u`V;cNfFDi0e-v}hS-mNb#h|ssX;i1i@jZu6Z+dL9UK{LKlIg}BU1OR)$Yh@_t$O;Tx>~;_KEZ?-heOgn8R=J_2W5;Op7CzE4y*nJe4_Y@m?VFCiHhx7$G~0fUJ|(2+ItftFxnhJqlCF{bOY^~E(+6+xRjzpf*O zkq{7uTPFBz)Y=~HZENf8ZENk@ADl7V-ab4d80;Gn*W)c+T`gDANAuA1l9K5|%~v)r zonBIkUqHih5Hd|W1^dros}>&3fhCnfKlU+ZxA&8!Xd7TNy^C}hmTd#{)lK>;M$F;R z><(0Rk^ziFJgs4}=QO9gsICLn+*SsoD$8hX@8&S8Oh+9LRxbbP{k;6^(eiIpAttG+ zF3l^g+Qnz@j1KP79xJM-C@L-|>w&yW1dS&Ub!?UtW`iUhb5hrP77@e-kkQQ-7c4y5 z4EoL1u9((HCVGHnU87J*%v(X;(=}*V2~gZ=08Om6miZBV$y4-~?jtol-Uhcnqn;PIfy)MXo>!h(I8k zA~dicRS}QrFZOFEj-{3rW+XTbjvRz( zz1j)H|KksVO2!8z8~G$ha>5RH1n|Roukx1$7cmfE#|i-(wshBED)6*1AmD`jo{#wg zf%FN-+Y#0?LRF!xP~extS0BFbJ72hZ7_>)s|33Or^qn`~1f3+DF8H}r__-i*m5mSQ zp%m;OvdIh6#IWgh+u4{yY>Z>lz=!F&m_#!)%$(o^0MRR;FlG!8W=};`Rar%5In5sO zEpWv-;kGatOdW3%4y_}U!b3qAPpo6;DrlKo*Ei&GUQ*h#vh_<}YU;E(JWG4)=C%|Z z=nA)VcejMQ_}Zo=oyFbZEvHU4&TlQ;-qPSbKimz+*QAH1ceN}hR{@%+4%esow3W#8 z9N!P5z|VD5^zn}Nj<(9GMqX9XX3w$@@?c7V*N1=Jf|TfM{K;tK!OI*@xOo@XHK*gU zi+;KPmo(P0(_hqldLJ^C{SkkDo?o{i&(Kh4fZ(zqIOMuEyN&O{;;Pwnl#Fd8C_82< z1UUk79Ai%IV2-5)5tN(>)INSj*pWyodRDWlGW?fS=7P=UkoczYN^sH0XYlq)A<$wi zl{@6DFX7o(LY9TgYc!8-UU$!2n@6h+h3C}HY0lFz&b@NkXIMZ2$MZJGjs?XtD3`^!kv2VQ}_PoWp%%3LZh7$U9A?WWL&!h=kQGpm@B6A0;9kZT*3 z*@<5kyuD-Jf*Es%%6f-8cAwkX*VlRO+09+ko3-OxR?S*wciV>tng-j8c;uDo;qY`` z{o~e#hA?R0Y&O=#63n$S)*b1@?vD+!8ppH(sRuwG8*^Y@!PDxxSf*y%7vqdhc4kH( z)#Gy5Ss5>Ly0hyz*2kPsBW4N07qNPFgpD~9z?|aEZ7ciAQ5))QY3=Q9Z|T+c+dp?s z`!Jku->`O~xMNAvw6?Br^yP3@N8_g5x>gZEY{>Yz^mFLk@k%TUh7yB356nSI(E+p zb7#0On!Q^)Ue>cB9Q|i_Wlx!q&nn;!0JoP-i`0V_%q0Y?a5jbtZ8kepXt#IU<6}!i 
ztXb;6Rs6B&Q@kMhHt*AppZ~-2e|bL1p8RpP;42g&Xbwy&#d zoZbw5VlkZ~r|qZD6gDW*4_nx`mky6CS5IgGvG< z)V0WoOw7oQ8dyVic6OjBK<*oUC#RU_U^#5>=!5^JM5(nMYm#t{sXRn06C7nWpt1rT zH!ZO_5Q;dF2{~*7H(!WqyzaI;>>hjT_6~g7Z5}Mnx_10+Z@$;>^Z9Xk8;RC$a$2(T za&xjXY69Q>cAzFBJ0~|Ut0gDeAY=gR(K5h834`6h3&CN^gbuCRMl-7`VI?3uJDak) z0_5yOb`zFtO79GZF6j&f{`4J(+h)^!HoN_TTkmsvbo>s=Y!^TYWofk;IXT6Ro%#H6 z(Dr&}ZAMmBZb42*QFJz_(1d?)!EY(>MPjDGXX+f06EZ@0A`$EfX{M&)NNrAPc6yrM z=XJ9bo}wHj`EYooP#{P7aX!1<2t@bo-@mW5r>9jrUNU`Y3r(fy+uHGS-sla7sRBuP z%v!A!{yLkLv${xi3Soyz4nnA!GRfHyymj?`}bea+S?a~ zeo8urng_P#Mql8iNYg*x6K?GzJBRNp)BeEf#2RL^b3`oT(2JdG2Y^~99{|y))oeDy z8dg$Nke{0^*04HWN2;+yB;*7n1qfS^;8|T^W(mxSbjR!4Mj{p3xHhw`qqnhk@X@lK zRqb1`qG>&i&7G@<)~v^x;wfxf&{)^pQd`mP&kldHVL@Br*`qa$jg6`aq#*bmmWZ%*0KLm&z`Ls%5SY*U9J3d zlx^Ju*{YZ-K=e5|Hf#o^*8mpOQvyY4I`TM#PNt{mfP)L=UN&wgzd`|%({|bCp1K!~ z>Cb`kREm~T%L7{8sV4GKY2beV^uzN+Y700je1jB)Zh-foxkMbL0l+pQw`!iF9y@KzDv77lP2zX_2vcFcWSy9 z?y&bT=6_0HT26jpVSdiE0RJ%d4({M@In7zQIXSsm%{ftuc0oVtJ=ncH&PvHg=LTF@ z0hENuV?#uR=|%9-(4ijZPAUDVRFNj!={x^-i>_NaW9H#l953KgCy^#`B`J|N58Y8}&KO4Lp8UAE|M{ zE;J}kRSM)fWmF0PkA%M^&>|9ktiBznPPFkhCldZ>^mp_A8lB6l2VT*R?}=t=wq3jV zU-2n;`Nipv`E{sGds#*VcEVKTQXbT>=5@5T%eWZTgI~1OH?+1k)VB>cw6!(Vx3>dY zV`0pVG~|vYsF7XrP#$+URq9v;a^WCEgyeKECq;C^P0U&3;tq#x%npApsxNhuL&EEJ z3vd+~k3<4xrIqESwPm&CS;6#Bxf`o!2fRo(Y>`TP&JdOQC=3m?iJY>sg9>I8HDg91 zD{Q`IdP_~G>$Cn#*R`$cDckGvtedlJ?6v9hudZzh-&!-XF>hLBLvjAV^1NxYYodpC zv`0qV9kr3#%1Y3QnjqDbUW^rP_!S9*|!U_7W27h=zHm z*~gikY77S`^caJcKxVYqQdT6_k>bMqtc*aK&%;7Iq}CA>L1odtaw!=Rpr6_yqv7`0H`hI#cpHO zo1nIcmysb#LNlbz7I9uLq7zZ#>=__DN)-rn{A$cExEBaxq+fM2@EsGXt8iOAE2$^(l3NQ5C=n5M|2RP8JY z2uxCfMDj2yoUOLa3MJZLMC6?_SVka#LZb*~Gupy!R1rn3(NTnahnCa!uWT6l#_hR< zFTQwyAK1BgVCDG*ISY425Ap*B>%h^$uvB<#Q_AfuuhP!&~4JN7Q5 zse|MVd)=O)zL71LW0Rs|zu0>@f2_Kwt-Gg{c3wE@ksoV?psSkQEG;5LF3^q@3qs=; zf*75`O^A=8FvCq4uC?3*wT7l~u#pN0mJq_m!okQhI4ds5%gxSkBS>J?yh^&CN{V7p z`$vgr8g}l4*~^R`I{{fn80!e}oUz^RaYsu<^|bKH#p~Ccx3y;N1)gzxL$GFe!O~6Z 
zcb)55GLTgx}@;%mzS=;MXS8;zMav3#vq@hF};Z&+eVu72noqtMxb1RmJ@l=oHD~k>=~aP@jP%SvRXT^aqrd1t^|?wo`^HF}ADbFJC>(0T#~|jR z$fkhLs5c=TZws`kl!_BJED4B4sn}bG;{ZR1zKFs^p6`QbJD=wFdkcJj&?@&vAHjJ1 zyuCP4fPcbfZa%${UxCvAI?Ib>6H@{cm_jSY z+{Ah%+3tnKt<@*OU2KS^4xc z<#yL2IYmV|L-av122WZIV*N}Oij=1Ko!B|T459f>Geh49Ib*{s5GWRc#TKNh5;;uhOb2{&f^cH<$HW?)Q9}*F5P+RYv+M&;6imx#0k};&rHs! zx`7-l*mLosABja{|yXyefE59-Pabn+Yp5pNlH*y{>3?Ai{Fh(=Vd5Tqeg z8%JN8hKPqs#?B20nGw$zktIs91t>AwotQwo;^7fZJ2%FIBUTOs^U8yn)gmS4#k$xY zh^b>>%Z2yWCDE=9zxZCF3a`JIQ1wbmxFfysS1- z<;6UtoX|`>z$zt(%31>HX+m#do<;j@h=JPpy2H_5{d)iY=p#rxmsQ>#-TH;27@$YQ z<;St+E{ZG&A-W>}Y(wa%!6rzo2VAg2iX_j5h#VUrtQfI!7Az|*DK46b)T?Ug&!~2cyL;=~XUu4yQ~X&w|8DeIo86tB*1=yW z?pTZ<2L-#lhMCnRm3`rwa9<6-{`@~>1yV4JAR5*NK4@>K1eGe<$ABRqct1zXB=S|r z#Eh(>wz@o+ljU)<5?(^ZYN}0P9Y+n8Hl>gbk>?VbB&@zlR&zv#$?H4U&J1<7ANs=A z^NL(<-Q!D7YYsImY%4CGx%R=?9UcAs9qs*|VBhNYSy!$2)!!=Wynb6|nm?Or_pK{? z%lG$6LERy*hT!0r?38Br_b(iq^9@^Jlg2Fe>!;TK`lQu z7bOqOM#xb&&*!Ga{M_?JM(g{umzceQ_xhYTYj$+<;fInV)IRE0y~;I)dzWf$E;Z$owH_j zhI>(?#ZMbtNM=R2bl!^4%(c^}uZ@IO%q?jx&qR@s&yTdX_V>58M|caoNn}QIW9J5J zBG;F6)D_xn-t}{)E$%2T?pQQ!&IYf|2B%+AkIiIhJJ3Zr@+W0G*t*5Kh^<=$!(=vs zQB+nri?xAV zIpu}iPXwJXx$f`p`~LU$@xbn5$96yT5YS?zVzwIZU~IyPY}B%f3bT*}fy%)d(>t5% zY8@QgkBWR$02p5y3KnZv-ZMIKiuIh1M!i?=pE+~-^cg(InWCq{6_;>lRij=NqPwb& zKD~qPiYxomt18N{_2Bb5FLZi+cAKxPqAGoMReySIT}_(iFD)za){NG8F(>T~n?I20 ztzBE~EeV$TwX~Z0+H`*LIk}GP+&qt-THVl)IWUmf&`_PKdyob=a?#|r>)h_nFDgyz zj&!G0*H)*d;zQh&7U#P$E$waxxac{3qgIGB0y+zjyf{^7_+F_P9>#v(tp}suyj<@) zxdI}>3E^%DADGSNY0%`8Ji=mo1~hl5j^wGa++ zxjbuSsp27Sn5gI-57*SRw${{y)pbd2I9yxR(!#%O{33Bt9CjGE=xql&^*4cwmTog9 zOYvb-Sezv^;xgTWOT=h-t#$RSZFP06B3R>Hb*-&+wXHa&tj=bvLjHj>Xq?WVRb<(P zAI_pZr5t-!#UM^&yq2Q&wNiL68sS&HaM3tU-=Qi+5wzprMZbh>hS<%BXitL{C#{lL zwh$IE9B9Y383ZdANKHq1Rcux&k~82d?U*k^{E_H@__=n^o`W=PFy!Y0&#l@j=SzOTAXMyV^aQRs|JAdKP6wJ~J8KCW=-5#vQXT#YVYcN%xgu>~71A-tG z$V~}263Nn7SxIqWE@;b0hqd}OzYB|CM+~(T?>VzVRa7yd9e$KfinaIhODn5Nak#CD zHx;!lXt+RI67KG9{qh$&*31ZgPBFqi*D$}mn6G)ZxxT*nBKTYA_+6a5JsbYj&gMUi 
zJqw!uGQY=*Xi}U#6Nla=9(560s7wRG;@w6@j-^Wn7tV5qs^0ya@2nK<#E~0rAYFY9 zcz!SNc-hK`8}TnT$|jKcX}^PQ0D5Mkvpf_CGl)81Bq+WCJ>XGCMjlRwYH6PV&InjT z@OaJ;UwgoJmj6|J2m+2A5j5a57v3N{&I+)aj4wP|4IS15QhmBD&5s2g7P-yMDKbs* zYNSe!3vs*5g1Z?QuA&FjW^>rLC+kOq0dgb~EX4LmVM!rkFCZ((r)^&7l#Vorf9a?s zRX9T(A^K?xi=VJF+<~905PquA!oO3tw(MH`EE_A^c!Bx*hDY)A2lQ{{BdZ?9zg3T{ zT=~c&3>qvx{VDwUZ!yOL;IIL-tc?t#Scnh?dA3bMP>LyuAOYc`2ZcJP?Q9Al#Y{mc z1D?mc*ahfzES}a_R+^D+x3#x6c2DcBt}JaRYsk+{FUctJd29iDz(-MJ7H`X-qM}$z zDSmVYD{&!TM)_FI6m449skd$@C@Lx_EG*8SxnRLe{5+OlT%2E2P*f0Eupokatz$6y zz#zZ3tgx@JER>Ir)AM@!dIko1`g-Sm+45985KXY>B{&1-7H7aNi+I7g779{*c;W;Y z&S5)r6s=&AkONIrhfSOmL49)sYc0i0HoG_hMn|@F$M_jxF>yJoswgfj$fF9g+li7? zHBH=%7VM_dE(jH0app^$WSTex)~Fdr!=4dmy_)CL^@q!=Q6ZrNVY)}F35EM>=c)r? z{2R-@RugH+-5so$I1W};u_v!_R?RInwK9%)|MZRgT(P!I9(y3+cz^$XV|@c_kD<~YFsNUEkQ9` zWpoT3S-sd25_C)qn*d6CLIEs4p`zO2S{$3tP8a)X6K6H)fFvR?wWWqcDQDsoI!(%6 zMA)@Gi`^b9w0!?19{1v&T13_FHg|P5;V?Rm)#HR_XLBHsTIN4?s&ZpXXECbn=k+vS z(bCh6qv*XtmNQYyN=KYQvfLso(q*!qPTmfsS=u$kyqQf2hrSV9%%F|03J6%1)eWg7 zg&T>aDF$nBM7G>T$GyZ3l01uw`gETrcMQQ^M~p+6S6^d=cJD&z zL0K@vv4L=lJTyQxriBMKZ`ptGIp^+0mGYi*&*j6>d$#S|wH4#}ct9IGbL{WaBB!2I zv41~bfH(i>97DC3Geua}G*s9iKe4uII2zBiB{%{gh@rV7&TE=yHId-_?UHX_v;huY zhFo_0*hQ!#T>4jEuE*=a|GB=uLYF$*!shaiv>{`Cz_~{UW;+rpUF_JPys*uQRRMkw z7v%^u(;c<~c>c^1KRmw!)ohoM!o&tqv3DZ9d2Ax2DxLU^AKJ-#RXh^CGL^vgc z^Fiy-HLI% zxAPaHjr?AbGwqH(v>sz2Copon0KJXPlID2<%^{Kk1az1`J@7@1cvQhSwFBE?CgVAL&1LVfC(aqKh_$JH9*g-67qf`tP*!8<)93u+Ycm&a9=RvIXH89g8KO}5iYu}1 z-Z8Q6V$nsI22({fj3R3B;t8W{$hz*zYGL5Y+iv4|=kL~@*)5V(s>$A{mFcah$GtO!g&6$Ie0tDDGK5-aq)go;_AKGX$PlIh`B|l_y+W#2+ef0OQA-aaIh)#o^f35eW1=+lZ zSU_jI!;EWR1oA_jgas!*Yq7MB&J&b;*(J#d2eLBP#u5(ZAWd||B+|y69gY5c9 z@%C*SmkkEX@_i1Eu3dBJ@_lhJ4qZ<^*q%w}Y9IpiA=ZRq?8I_@IhQt1(I>Ci+= z+T2A8c#+wW{3vpCnN(vGO<#)8#CQIAYeuT8N&*3gyBr0O)2C0p1v^CSO*Egn2W1kX z^##y+aU?^KiE|~AKx~&P0_ohAK_G-qtDR<&m!we;M~R&3G*Lf5zAq^Ifgs98M1iJH zWc^0S@5Je&@Y(Yq|ItWBaZxs2s^KisHNB<2&ViM}a6VZW8vz{@IFp!ADrvDHqFO6;#2;CIPlau%BZ`P1o)o*AX*% 
zuuIGELbF-E#~HUV=5}|xu}YPf73F7VWu)Vnjn{*enT#>uMJ0!$EK-pXMnxP;6}_k% zLiVeNyvV*RFTb6<$3FOfkw4vZFAjh$jXnTVz3+?sHmY`{nXkj4B)EGzull3gxf{0o zn)V%0{W$e8-j1XC2Y89jWuyFMyw6Itn+#-@*DpL;4C5)otfB&dj!Gq22VS(XpXEd% z??4IK;h2N9YvIu<0Ae3o03z4`C@PwiV#PtCd;=)R0u`YcFcOU%&$*Z);;hVqyv&lU zl5(-NPAAQD$bkN)QoR<78Z>z{z8yPQD89MqNUF=#?OF_{XZ%a^Ad2N>9e8I2u^HyM z_z88Co%)2|wjvtPy0+}!!bhT`EBGg;o*3Id7X2=FAKY{6t(e+3J+ z>G1#Wn{mCfEhYrtKNtCaT<_ESw#}+lP1qG5RZQQ?xt1DkB4@ zFMfLJc5M?sd;B*SU1x_#?KfVOcEL**key86Nd}{u4z$r<@t=xnIxt1MwM~TU)a^I#y$&b)H6XiT&u_+~Km5%F_zNP; z@#!6GE#xWvGUYu5tbtwqP!KsB*=8ARv&u*3poPb>dF{5$ z?Kv*2ZZ!mnCvNB%KssJGr5@YxhgiHG?^#%pB&-|d&2pd5<+9mos<5+?o0;xQ^+A*_ zuM2Aw83Hj3_eXBI0%uI@_`$DjR`;BxTs5|J;iO^3IJbA|_^D4$9jB{f`>Es6JDTh2 znp>-@TghP~!*47rYb2si@4>&1cjMnn!i!IvcDk;)xz3g9EGR84aHc{ujDLE^={F&p zMrbAu{M_ZIzN+03IaSK{ZW+Gfis3Ez zZ+v`s3prAFQ~ZUyO1y&zizZK+_^Lk5Q*orhd`D48PYta(pU3+FbbsBkJA@aTo z%!B3l`zb$`nO;V%6Y0vj<)uY7udP3%F|Ur3B=Jw}c#0Dx6tRYp2*ZDBHXQv$(H=)q zIY+)s+rb{U-49>GSJA!?ku=fF;W8hu%$P_rD<*z> zX85e8_VhgM?A0RQY^a#XH}_8qt~%uu*=TsKH5=tK_Du_|Y8lz0a?t}aC-rPtx2Ao0 zMnR-(WSz=Mvm0biT3+?|8p>E_lvd4Xv}UZ)zM17!V_Y{9)~7PNWa5f^=0)Ts(?mYA z;%SQTS6~JHH}aX2yRkRg#vV=E%xXTpmmxcgP)<`L@&t|5{0H6vj^v7$U-%gE1a9n= zmY0Vd?kWnYf%_*IqZeaPKddE)ImJ`Xu!pis){w}e$Pcg#*j)|mu9>Zl ztYBUo>GZnC)#>pRzvjjc2%H`&UmVpi%)xEU=kts)j|Wy|zWj@^nptyWeQixesJJK) zpw}E=_Ng@KvO-zdwwX9U4u{sL;pGNwzSECUVwgA(&o|zD^UX2u0(apb-Un{CyerIK zTsn6s*L*8DcWCa?I(pSqy)q1maCSF)1AjH=&sc4eWV+Sm;;ezHaA4Pv=(;JqOhn3*?_w9N5XTx23Sse>2sNPs{_|s3+~)E_;w1CiuHd;bV#OR~zvX}CKh;YD zUN#+n-|@hcIAs#SNuCMfF|(m%%v2m%N!O7rqt8wF%S?xK7re>z(^2Fz;F`cd5H2DE z2St`Hgl8fDv$j|-NvmVi`L79&!1$KHh)f{HwCq?g2rH^y94U(n@i}34h+G|Gc`$-s zcfqerXIE-F1s!`p2W(Y#1tsDaxkU|Pl8l6LjI*)uOZ2y&;?Bp$(eF9ELG!b(q>vvJQf zyc>xSZ&?9O;BmY`%^Cb4%n)xy8{pVN#;XJh8WUGrCJx)h;yZ`bbN zkLq5e4rkmmq_X_&yfFHf?mZV0;;fVXODp9+1MlaVycZxf!3*&^mD7=!(4H>agyWgG z1M4*m=Y;+r;@$*2uB*HozvtfBN84-~Y4$}kqtR%j(Tql;U1Q1nBFC{4@39p-vFtdD z?LY#Fj3GD>NE;HsWha3^%hDD?%vLD0A77v?&=fve2+*%kO8L{}|MV+-5Mqu0zxSMb z?<^Y04(;>&?MM5)=e*}V?|IL=zlxDvK%gl_q+Y

ceFIWL9^W#ffA^bsU)W7*`ppEHpaB1a@}jO{ zKZ;B0OQoXx zm$|XA9DNKA7ilggJte=MJ*ISC7o8Nm@uwbMJgam*A>REV-u;?l4TjEk zo?U!cem!2snZ8|qy*!|FLT-72{Zhn#Ar|Zj`LESjumoTCTGm_|7I0uJo|lQSV(_Js z^g`f0dfA8IZOtb%2)qGbAWMs6RiKpM33Gj(rma3tvvBP<}dNb>x)C_FP3K{v)yDjn=U|$vOqVx01C6(?ADV|R`qGu z7y%km$lJ--K!3V75s!qBMKa_aLcEHH>d!gbtjHRr{opy|%BQhb7$3oT--fk5DUFtk zxF3v$ixduNyhu_dRtZPctrRQ+?LcOl`qkcMhgFt2WvA5|6=QOrGT-9^9Dg(>>H_E^ zO2&&-WZeo06P{)Wl5TjLk*}A=AlQBR3HDvZfmOqDTuWc-spT@sE-jL9#gVExUKf(# zaFRa0seL1VD;SFfgYkG!{i@TYL#NAk0_Nv+x}034Mbi4V!3!d?{KoF7j?4UU+#`YXUmi0Bg46YTH0IKlpKyrun)Du2NDM0UH{h$XBG+;JMKyt5KNB3g5r66 z3^E*kVYE2Mv6{NEqkQa6@~!iXc=A-p@+%#I+8JsSkj(K1777j%AKqZM6c)~0P(0&K zxUi9c+iD$qoM@Hj=qeuUQ9q|EBpOM(%JUg!p~?aHWU}B=Lr@(8y)IA{D-;ZaE<-9Z zgfGNt&@qLqCkVI*CvY+mK{1F7Lwv{%2MAP)0#BUF8lCw0QH`qKKYM5sHxdGh#nZ)kkPT~h4Uc(A9x+}`Sj z1BywXx|(N?g4#Qzem-9mW@~c6kkrV1cSrFMgBJ`>bVQR>_=Pqq0m&O$7Z0x@JAa8C zbp*Tp-gO7YHpVmdwyw0x-L-8v6B;Am0zdD0su`i4cUgKrhCC%=Iiv7cU``VSbHX`s ztmf2Hj2KQm%Brc)fIF}D5Gc@vFh!KFi&HpQQ~r>rm=nMt_`~}l9N!{fh+x|ost{-} ztl$$+;?+kK4TKfXP@1g+HAAD(v|uTuJahUPk&)xA)6e^WlHb3aB6}Zfh3z=w6H-Iw zH-sJofU(kD1Rf0Ati^n+fuc}*N+4&l>Dxfn^}N2n?3Wo?S#kr7RB0!w1pLUokju(5 zfFFs~@buE%`JOU9k<=_rmB-u3#f|(J1nn73s5XXJTfO=jszLDbD^_F}QZS~q)7r@_ z%mOpJRrm?j@#!w0@fB3*rn{gzE=EO2{%^1}|FcKt=+i z4st5l0JIH2>p1ySee6g1*uQsK3@ucFG%kLGnV2jmTM-|R3*2Zv)lxSTk}&y9{&Goz zFTiFpV$&>Uq&eaKmFWJZS7E?j^-To40;j<3V74!v?1_d0es8`jUxVc~?jo-4<0o~P z>8^QFnk$zD(71Ccr5{y45ujSLgVq{!WHaFHVSjhc`1XeJzpan|I3Isod9#XdGw8!y zJG#Zf?~gL2w~gGejMg;aege$Q-WE!?OmKF*pq6j%9&7DfzA_D z(3da04@+4U6%C~82uH$uno<@p8g9H6NaTGpqJ3H)FXuUBxrky8V-;K*Y zaI%Bm={!9tQ7t^+M>4?;12Miy%{MCJhnjDUk^2PIrBT|FpCS^eXnz~W73YY?6`238 zjZ5c9z$?yCxjY3}O&2X7Y%{LerXX_*FM%8k)B$bNc&4IA-j(C_s)+I5WBX>j0UH~5y3;~5-G$8)iqCp^o*Gu<@YsIJw7TLZ&Q#uNCZ z@c<{XEq=T42(JP@f!A^31p=?KnTx=+ARb){qdJU)Pcf#zCul(66RSSCV%68cB&RK& zL(Fk$ufZkNN5g$AQz1{Fask2?&Jtu5l1WLr_8^y7Sdjr+;}`;eGzQWrAv<0F7=$wc zhrk(SHj>(a6j#|u{=}7_1OzN*YcHn_0SobOF^=Hh4++MNQet4bCZm}+AxRzv6J<=U z9MoSPk&IS@)o8#S4ZpSpdXp7*wUxgKqFC+4{bg@5F*Pw-8qO2rPLz{n!jzpNkZSNv 
z`zYWO`-pWyn*pN+#t}-^1R`iAl)oM4`V4T+>y#;kuPEO)l3kx9>9wFquZqb6kp}lQ zOjKNYH82ylqP~WiF1=1oPK*y^BP3yYIbZ9S`?>Urxf*pk!Pm&&3>tx2$IpkV&zZn> zxjsk98C6-$tm$%%V+mTJu|O-Reev?gA{+=^Q_u{ZTa{+&RZUgslvzAh51d7nhXic2 zH)1@oH;7abdxMqoGuEiNrc;7bRaZ-zEAR%+C(kJ~UW)IL7!R7OTdnN{vl@m57d**; zHa;#gU$NCasPn}sn$wIJhvXAS|9NNeQ8hGN0jkM+L&XqhL|x zyha-g!MBi}18Ix!m**vGtEH6$FT~Q> zTWlA!Az>P|qxOXk6eID0Aqvw&vqTA5@>De_FwZ&zJ@x33E4|#rU?9%_7a>!x}SX9mX*ah z6!yxUWmLCBj4WzzLE?biWjR3_Mzb1RD!-0IkS(6+OGkPmu*OmKNtY8Ahp-&hWYNZX z=uFnm1I?3J%>P~U(76)pp>wsTyo>Z&XkX9=FKSBz{Wo2bs_xQ~#zvbjszvZqu_lK; z7xB}#ipB!3 z6iR^3OpQUa&SqBPzZIYozugXgo8|lbYJH!<`Ekop{{=1)HIMp$=#jbqRr(iqU? zP&oYMj6wS&;Gq4X>eKM?l8!8c)kEA^2}*!;5d72R4FL<`UW`MyKO}HZHXJYoNH1%2 zo`4~Sv{0;_%Qq`0M35ftJqiGDpf^Y#-Ji7IevU(njzf|v8zfWgYE#LyM&2U-fEa^t zNMkferq?kB;ZTf0IBYJN)(ulDnbyEUd|iz5D8_k%V8(sAWJ<_1KzlV9I0NUhY2C1X z=zXimraX&djaS!_PIY|K9t-$rj}an7#%i9QWRFIJ}ts10C+SBkK8PoTt1ofOM) z!fT*$8*HBMG07!gxt!xdte6^cMkWi$)S?hCS-n1*${N-BXoF4PYEmjqd##*X@dktV zTvzxP%ORhI`Ux6l)-w5MvNc>=GLj zq!X{v#EQkBu!f)qEcN7yzY1DNR6R7rGkJ|Ke%?!*7HGYwK>%YKcoY?YMs}xyWml_y zIUGT!D&*?#6RhHWf_47W>gxjLE}~#%fkJK9qvBL`B!9V6NaKhk2DsHfAP<4~66O&| z!91J5+Z~){M|GNo)x-};x(!x+iWo4R1RSU##z)4u z8!)a*A>9?3B9PIEKRhYDMR16W?(~C?kS;5e?6thy9V1IdRHFqCg zhJ>-kyYTbHqKu7|hEel8lTIe0ld(w_VG*jB>=k}KSZ;inj`}E2R{Ugn%xbnEnGC0e zn)Fc3Wq=O;g?pFktUz_3jug(Y6QvknI-`CR4o5m_qlo@$GX*MSj<8mQexVKnZ?)jr zqW4w!`$6Ws?I>@yfEH$jYHI~Hc-LKD5KR}pj9Gz46*h+~eGk959_U;y##|8>oHtY` z0u9}qj&zJH!gwFqEyy?QM!Z{4%1D2FnZm5dzE5m1jlYv`evu4JlRK>7-I}1G)}@^z zFA??15-oh{UVv$IjTfjCGC51NDb{$F!dJr~9G0F?CY%ABK_^wcMd|=bBdt>N){7=v zb;-Y90a-$MXlS(cz|V3M~LGr6f9lJ%rRCR_0+KNLnD(6(%Ms844ad2O1U_j;PP@g0A!+ zW~hIz9FediFcIlvH9i9Lz%7~V3G>Pky{=x$B=MF|y^HV4NtSs$t)_f|*Jlv+ll}V&-D07!R}l#~&BXz)qc_c3`{D?fxw92zF-b+|;)L zs~fOJz{`+}QjWLs&K?X*Qn+UY2uB474iDn%z_Usy)j(4rddgvT-?a4M4^z90o7ny# zw(;tbb4PxDFY^v@UIc5cTvk2?o-TY~zk?;z`oMl$-S_}e(*jL=u{h~~ZYa<`y2hRYaVtbt_jC#CJ> zEt(IAd}|HvNOD_}Zqcv|3M*+`hzKmv*FiGASO?8hGYcaR-de@yRb56#-u}wPsCtap 
zk1y$Kd5gZ5`UOBjA&(h(_L^--A`YWfI-X19>fNUeYYFRXp?PR6^$Xy$AinP;NxuuA zRennf0)Xcsg~*x#H0w&`SBBy=*%#e!lGgktU`7;;f~yMlIOv3Mp_nLxcD2nJ$~D~_ z%(Vr)-_LFCxM6gg`}*>_Ff;|GD+e_;7&K7A{wqf)4+m9q?5MVs#H{VH6!->9yKLnFnROuQGp%@wGPFFQAmS@CHN|@n zV2Q$(A{_+PZB|t8sW0Ax!2bB4R^f((-Se-~O5FT>Xl$Gp@Y#D{ZsYThG^^q>Q7z&l zKMqYE`zw6hbhis1x47H?>;8UW=cKzm%Xt%xTWh=h@3IjK`00KY@QeF-LwOboA{^B? z%kX)^UVaTfy}z4+A!;HQG(Wvq2i?tL9W*|wr@`0?sD>|q@Z&f@->Y2nKUgWKbRiLjI3OUYU{okcQ|?ls_Q^p-FZoRt%zyMLNPoLjQJ6d+MgecpKCx?b2{?rH@RT7#{m47+qR@Lk187%iFZ|bCJnFau%0ej)QS%{F$uo>P?f)qS${iTg>N@J@KgCLZqMd*%2#G#fepA zh3YBmEkqDAN_}m#xWtkvc8!)Ij{I;g&~@X9sZCR+*3+x?7MV(B`+GXOw-tJa##pt< zh<>)|{uXp3?um`%ITybtG+Utro20f2P0>#gGJapp;UH?l_=1+`-WIe(;~yyR9c19~ z+d)NEoDejg)Q<@yDDKyQU|8qNsPmqbMw%TBqA1Rkt%WYz)0PB$X*suPy&kBRT7KoqLrXi%khPhU| z-E6aGxlsIu#u?Zy7q2oRzA7Lik-vxiP8Mb@Ql9S1Qhmr8<0uf5Mn;e9RUx=G9Y>kE^5Io@_~S4`hoL$0+bgGM>OE$zo4RzkRHs zSTo6XV$C$(%k>NI#{G-_w<=7K4M6Vbghlm}Q*;R%CCCOsCRnk%x>V4sJ_)H{?MJU6 z4-m}(oT3ob z6mdQ8k)*?C!YjDDxUX5AvMJ%<06Dw6yVKq2Xzy@=@4L=F#l92&6#FjtCt^6c9N9G7 za%~9`c-jDZcX7Lh@bA1;r==P*!x1X8NZC<%So7yF))S9KbtjBfjf35cN7}f*%*f9V zji=TVjgTS^sgO$D*QJtC1`Ov(iON4Lb1@$u&(DsXe*B>}K3pRI^MD4Ax09grD1Q?VSIM^UuAgFEc)t&b;ue(-=)g zfJlSOX=Q}1F1z^^l4S%OLY6^idN?%U&IP8-s9(8C?5H!w<8`%I4zVVZT?9Nd7V3Md zBN5AlkSVw=Ib-VJLgH+bu_!iCz$Nni5E()ao|qUCeqwli?ji0?Ia z;!C}0P^(uDf7Rrh+Sm}97qe25K=WDaR zaxje=NDXS}RP1o@Ud2&uAE2zs1%7_vD{#o%&UsQ~_@R&~wp9Uh)(5RU$Ht==94P~o zcV6>g^V6(7%ojw`f*RGM%S=ODz7cEE&v69Djn;*VmuPn&Y+{Y;!uc}ss`2FEg)dty zkD_<@*kXNjj>YWn}-&!;HXT=*UmZTz2oe9X} z!9Od}CzeG#s)T$d^b%L`Crcuq2`YX~eqaOo)P%`~1iyNuswzL_Gc|jAWqx4O*Lglu zd1hj4bfl4*wHVTIz>ofvIxPc!)_1vIh2I7G9IetPHLR?K{wyvRp4_O;uA)x^UTp7J zllL|FuQ>o6kcsL5e0wE^?(nX;$CG-eFJI`%_ z*MjSIxW|dFXn2S9eU;!ryas1i)3^4Q!)~PDW)KY&iv;~$Ue|zopqlQ$4NO?8h!vyw z#smC5Blun;piAn|)y6lj2n+LSI+vTr_HIghR)<|Q$oR~fV3(0W;79qz$uTZ5g$Z@D!-Zakyd6Pc0=d8)9 zAx^F1o#Z(I6O9Q9Q%BV0xfN)mCcJtCo{lHV^AxZ>&gp9!hYS(N=tGFA>So@UpuB^K zRi!NbT1@s3adZb>J**=CVqH zpCxf0A)h!vg~lMKi2Mx+R#OT=9wqv5*QMnZj1FLJnMACshZJgH+clU?kmJ!1xwqZt$2H6-KmD 
zK*=G>lxqqkGDcaKr#;{ZG~_7>ILT%y;Czb>tz*lR4RGSv5E3wYGeEtbklz#Y#zfdA zuPd(7CCQb7F5w*nyug4)m#QarHKME`N!@lWXp-beL6bBlVAW_6z5(*>5<%LU6J!H9 z+SU9>afYg}>ie(4DkP5+s~({1ftALr!io`5Wb?O&^^-bYDW3O93H(UF3Ii63pWxJk zgRFAgH_TC{MqE4aEjaL?SAkoK6Bf9oaqqhuRwOmRp)(8TE$+5ZDZARkq4w zu%IDIfrC;Qs$mhW>YhBM^q>yYr@M$|svJzn*RGxt= zg`NT%tF3ycwgIFXP$|@aQX*I*_TT}m`G?A7og=y^M6+5s20f2?t@)bggye(@aAww6 z1;(M6c&IK0xPWGf-&H=2=qkkWA>GIb@z;y4G$n>C0DJ**mU21-hxN>$gst$52$2pZZ=K zQ7)0a(u12-4d`ir$J;mdp;pC+ddTqSwAka3@FOq6Q)2-%;_K52j zph=h5pR+%gf5LgfHEPBom4Ua)Tv6akoC4mm<cO>OetLk;;d5_1_<>_*| zqCDrzMs>Pq&oE|n&%~Jd@?gV0VR)o*9KrEfr_3YF0O@;X)HM^^X6DU9G1fT7s?lHB z%oR!cl4e)ns&jK=0+(n3tJ*+f9dEio+U8(`wpp**HhB%kRW&s}<#fd4+8=cd^lWHeeh(g_a+aQiM#59ii+xJ+SL4MkWl9;3&1GA%@o!Ul>ZL$55)!ud_&(MEd;vA1w6D` z=m(HgJ;13{sxQ^&)I$T(g!eEQ(jeC_AY9|-5b~wYCmXek=6tg8sYX6Y%ofojCkXA> zNASx=KB=iGRX$0uVcp0{z`6-GvF=C3rbTsCq6Q)aV^%3w-LUF|H9*xrSVkgbBV2V) zV1wN4P*U2RXkgx9 zZj)~sY>x(9`O@h_m3N>G=h#G{@=^JwUBkm9?E{xw7UZ?=h~{KA@>!kQAaPv}wJF$- zyW|YyED!PkH%T|}x{4d1f1>o55fKMeE{OWUp|c2w-JBE!7)qRSmZj@C5AKT`4MHfnw#dg-YdThyjOC6F)aAjx=p`(OHiz+Knr#ASezJqa3yreev_OSk1d;l+x zH6m@#{iAcCKzVy$LkQ>WU+#8R+VJsTl*xjLmD2|i@eOI_6$@ps%ui*%du4w#r9$+{$wQDQ2NE1u(}%h!Am zmqfQW67=?TL*R-!+|*dM1OhD;QQQ>?lWKJCfXwQkKg4^dD>op8lzOM{pNX>$YN{Tg zPaxBg+UDu(kM%`ey((hw))p0)a;)K)E}kIHX=RpVmNjhA8;uN^-_!q{3ebP0ECjgOG)P&}r3Ew}1QO(UCy zCpVXcleI|+FQO0V)N)>a4z|i}X^+T7My@GDa}?bob(3n=lNiOhI>juOoBo;WT=+90 zZo#v=9bGX;Z?wZqiN*z_ZKGiem^ZcWaPcH=@b3Aghlh7hgu@fNhaX;Ax_@MRd_;ck zd~jqwmzy66ewh9JH;O|;Bbb><`#{5g$$JN*TLJxsG-GT*HPAPj5G{LA79SKyGx37B zY{w7Tf#}pt!*70bZk#=HZ>24F!&v0rQCy(J`%~~^R`rxw>3u%P8f8g3Nxfrb3Qs0q zCgPz`(%wvwb105-toCk3Y?PjZnh=kfC~R)^=MZaw62AO4=g}STJiuW?D=eFp3eUl@LDBuIXw?)3QV=iU8oK_saG-lraX_gG3L^Fm zv_kPgTOlX(AckSNuGjYfuaO`mAjlIaA+(4C1-XHm0|D7ouG(f3Wp~gMSUrEu-vXtr z`WA)D=9|t{`xZnN>M~@rS*g3@oE3OL={Lo2vB{hIR4ZQM&7YEqEON$@`ACTG88rrP zC0RsG5LZ)+Gzo-ckvLt!Yc~i#c5G>4V(lj3$vxxadzPPV(kL8s%VVtNKLY=w7;8H# z%lqW{FEi;?^>OAYB^SRi)nxlrckcC2Fq%h*q 
z!>p;2I`NVyK9!I!?SlRYof>Irim?^7yg_@gjoEOe+hkPrrAqd6ieOc9BLU9sb$ijv z(IvSAit(BE*FxWW^}mqs&>h$4DHP>W-_lpl_w}9s>QdzkUzLv^LTkFI%Kwlphfd?) zp?&)R3UCy<`b&@_`PU(kIA&QXCe9Q^y4Tb=qM{A(vEdN$lROSJMTlYws#(gcH$~;= z;aY7)%(0Pm%AGt22ICV?`Zw+yd^eg($m|wOOSON%t}a4!F~GX4-lbqvNOC3Vd&W2q zMn&#>)prk^ku1Lzxq2$jB&s*Xw~UUp)r64671lCi1KNZ*PbqIMRT7SCZKhGmU^1c&EJFrkG_4^E&q4$=p4Iz=A$1yQ~9MYeCE(Rv;W}}w~h;#khRE(P+aS$Ld55fM*Y6XpRV}kTp$_^lvn!%M2-SC7SNMZLWdS) z6HzqX1t{dI5lN*XqM9C~*3CYu5)B=}Q4(F5^Va!WNh*_*$(d}|a!$T~`HifKhVhg;vl145Fqp|>#eF$fdeRfdV*SgXIGzOeKgS$~Gw5Nf}%;4SS3GkyR& zQ)@QyLPI8~nO4|h*#Z`eu5NTJr3JH1mUxYEJ>N9b9jq`N=mKx?lNGSW6lu7r*D0&$s zXUHPdhDgRRLybJ%X%6K-xcY?5QBie<0cfrPpR83w9n6M+_`gOqTh}-O|0t= z9((K$DnB!V<@L`cqtPUNm=_PR0|yra4tMtrcYN{m^QZT8yB)!=^bHR7u> z(+;owFhtOS?7ErBiDDr;J}{m}pEXxH)`Cu`!bj)@21d}73c4g34ndFy!K*4JkvpM- zjz7x9ph${14Z}$wP2eUzKi50Z(gtSR)-sryoB!L8&mA<}ZreVZ8dQHh&^x>H{k~8L z=Aht1_uxy2UD~v}yWQE@>1^-bz3DxtLh)|T%;xHE*rs@>@*C1`m?;#0%TD@^pPyD7 zqB3{|^#g^69Vbe$Q2%47lLn}hD44F>o+$}Z4f|;5e2PLLqQ6`LJv8IyxC?#O-pPLd zqmMuS(aKLQMDF|jdy4Y&d>b!cW)8NeX75I&_0kUM?PVJ(1}RjgjP}lhy+eaz+%Yne z9DoV*S!rPsl2*);5tZJlo&pleq4-e>wFRk=7RwxU2(9)2x&F7Xo7=W*+%UIp8ub%; z_*Ob%ak@&835{0O)cVI9=exPe#*%tjv#}+y<7lrJGnMpRy?N{C$gq4u**>l9ZtFmD z!}h-ocYDL~?aZ4T4yVMnceLg+Gh0puv8^Rv=-YgM-#AjJfh^9y%Spb3WM91KU6N^Z?Gt2Jpv{bf@4=aMj_KRSb}~g_Ht5!x#@y zSG6{yNmzuiDpLqL9*2bP%KKu@{=~4w8sd^3^ldUMpjWWFGt5G0gh37d zlBq%nM+3G93h$wIT*Ju>qq7hn2$!~Jr*b!phEHzw`C^;4b`O+OTNf;M7?Z(BY%IKG z(~+H}fvLXSE96w~?4Zv#INN)8!XGkOI`*$mPG!71=emQ5$Xturv3=dfo08cqI%)$A zpz7k)D{_zgQz<1Cq$kQ9R9y~4YzC2S?_%;+adSj~FcjM{Ucf6xFv(L`8iaAGa0d1* zGS4EQ&1TO*kgjOc^VXP8tq8J+4J`l@wI9qK7n0}97*3cI(?+?<{RHUGqQg=MvC6p`t@U@>*okpOxh@YL3Xld z0s6G4qST3lD6=!bqs;sJ(XAxOXcvv>1erbD`1}2r(QK91^#NZfMmMsrKY)J6&;I)L zc&nYA1@~5p^WPj#UQpPabY9v8MZAndNbR=z~$H5am#|qo8VRw-c*pg2J7VGtpPfwBlk2|8L#e$+cFQ1@w-{X&9+UV)w~BPGsr(I!XjnM3Vgf=m&T za*>VP5HrA-1nnJ_qm<};35%7ju!AT>#Ec`GykkwTivgQF$8e%~(GvfCl^SYu| z+*BT}=yMw|F*y&fSA`a#WCe)7B0Kcn_wJwn$oskjfBU!5iQV}R-IvR-ZFlY2cGI1{ 
z?mc%^KAzoLB>p0EohpQNO-eUNHuZ$OrxU-7=(Y%?ya=i6HJ;}-^ zr4Z^(LAS!a2aP4v-Dbr`c6B=1L5~XTy|T2`+aa?pH#hrl=(;)71;VBeFJeL zu;eOvA$)mDUKAwrxXoU3iiC`mIfb%pI0C5Lp^6vo&c2y_gA>!an`UlmYY*?8+3PKZ zrUv%T+#GSVe{6i?#_{n@n`Ar_2;&L-Z90c%+S-G7Vz3+;>-s;N=m~sa9Sn7UK7^h< z7Z9n32sN7-3?fbmv&eFxTo-0QA@L}58zf+n+?Mz3In~_6aUX$x7@O8Y4a^DONP#H(w(0v54n@!PQy~mEt{7N z9e3{jo!xhJ+^5cBR?(uaU>mHL$(Y-}3u!V(IQsf}O1lhr8S{#?G);cZ6f< z?t_bqhk6GG`nq}|QTHNfFFVrVu$+5CtD`OLv$UGwr-IK!!mazf6s7n*I`nKiWHk$x z!^kpCb?|91c2Z3EO5no6BBD|>64vYm!4?QPeq78N$1J%D3bhHQ4_IWN72`vB=Ytj$ zqeSgQMX{Rif9T^EKl#Z|EOG~Rg)Xwacw;!H4eEKv#u{*#U zK7u%!kHWv>2X*}PG797K9)hI0B!qAW0|LSa05T=b^ZNCq8W~Vd)L=NsYdlhWzRVgA z8w{rX`a34WE+~9?ywb0~VnEd|gR$v6T;@4QvcD>80-V(!HyFiC^p?rEixeV9Fc6Cb z62U}|3o1a&YKJ93NQ~rSQ)NbKIm&HYNiZRj2_X!9WU?@iVjw|zy?NbGX?C4rAB<#2 zUKvTvXZ&(^YS8cSINFlgT;KmF1;e?p&)e(l1U~RAa;y!2MLXECzB9% zP@X3{gTW3ho2wN#P6UX@%QbD)yXEZbct#HDPJzHsMgSFU?5OyP zD+EMM%0M^vGa%tNvVsaNG7p%)g>FD@&OS))O7_OIFB_kSklOgegvb_fXWcIKf| zi5o9kB!3u3kdW@#yAN-+*==3+#mX-q&Ii2Rp?S6yd`|wob@^T!$pq_mZVSe}{-C!o zGr?j06nhf*iAztHTRL#yLcVsWn*_8SK*wnf1^`XM(>=KFxH0R1$R9zpA^8CfP%IXTL}HXcm_cGc4i(j(sj{fpWnIogJJk3T9YH!S%rJSZePZ4$aShR9^l} z$F9FFAA-&D>8syl&$1vqW=oP(sbAZT)wS)G=R4YEhJ=kkM*s$_Ho47YL*ys7tNzi| zZ?UhlF2_Uc)ig#Kzm+6>#>r}L(@xfW&B@xSQ23>r!R+BQ+ITF>+pm6`J;~vCfWv{4 z(uV%PXca6@3i;+5PYU2uPs+}wC#CN1;-sXFHqjUA?O6AdfLUP=^7%mJ4p4d6-*HVU zpRZE+?rTwb!?UYWdHr)luhheQk`(LhcpA-*sqTOPfu~UA$DNDKnX3lT~*d8biV4iUw3dN9}k^d;GKf=sNj||}f8Xxu- zgUNEij(|N}OGLgFvtrqcKnR;+w&5p>0(Y%qQJ{xGrlhhaSmVoi4{`L86oWv3CN%!; zF2L#~(EwtBUj$GzvU0?n!;D)r)*P2hRo;H^AzJ@8e#72y^BWHD*zr;JS5;2GceHZl zrR8zxi#9i?USn;c8gH^^ZkTV^=DbLN!e)){*UZ zryZ#t6cFejdn1p5X2$pxK0e!PG2;f$O^#JwcjzPhXwu%B;0Jt3-XVVn3d0~QX6wKc50ieMXEu9* z8Er!_Y~^5W@3x?Hi*Y-glty?fsqGfRI8Z3%Bz)0aQX|ix$s{j8EuoszhYsAdd&jm! 
zqBoK3P4vQM4lAqXHPB7wnqx>al&gxC>NfKf6CR~bzBSkVq-f?>F57*!JQR%C>_%gY zEgl#umxlsz@lz}~RDRd&`NP-MyXd)>Z37a}gZKVKYk zx3oE(Z7uGx;;z~s*cZk|)~y?%Pyev-$&HoIdy!GPtIO}lC!gQTzhh62b%i6|v93sj z{Hw>WKEZyWOvAsr2maO9!TRke6UIpv=qX19dx1IZVNU^xqDTi(qP;P zyAkk)c33cbSP)~*GbCt!ygMZ);l2G!01inHRy?j6a53uy?khyYBTS8(Od-Tj$Twbh z@(6Xjza8&q70R!LEaZ?S%CAKQD2i?~qAv;hr$g!H_pN{zUT;a_@l_Lsd88Q;DnwHo zyom93V?3ksaf}BR3#i8MYs6pMt*GCDiV7%vh2mPPwsYjg8AGihq=L9=nq%^cRg5xq z;OBe+GVN}GSqBD}j}SS-Tqh&~AtY4)P!$#0@f@TD8E$+bF2G8T_gkeFSCm>rbGsW4 z%7pn>Zv8|&y%Z>H?w4PbRRc%A zWVddaylEjixjSE)%H~IMgQX=od0TP(&O?<(Ol(egdcJE*8_eJWU92Z z>(c>?3=1GG4zh20!80ISk;>lyJ^zd0$cn%0FT-htfzr`L5Sh5X43tclFk`H} ztVc2_1B^T~yx+HD{Rci~wkj}8g3j+D=r?Z9=D%UF;am_ml;!6tr`i(^w=0kt@v%FW zZ_(EBO;@7B>FDh86oVDY)sd7dpSb!)=|yFj?ltHdr0tj$6d;h;MGBUaq*q!jh>Vjg zb_>pr!E8Vn1fEqKR+AS#@Ho0(#~mF>Jh4;>FDb)U_9!2(et@p87{Du@#yO8m zn_l`F8jIuW@*nUMv+0dISBo;TAu7V$hW7B{REW%E(@USb`hVHKf^HMiX6dAKPFj-Q zAU#v=+}@|al=FtAbAfJShjM&VWh4K&5ERSwnXm0}_ zFw!PD9dbv9q52Doc`U5;?vXaDtiNbnXt&AS=eoZfx%kinuR3%0iQA6ddg#Euy}Ng9 z+Aud8a}UIw-k7`Bh08&KK@eL=R|I0=;zC^~n=Yjpb-Tu;#MuxgQ z{=&xo?AAiyUhx1cFAZ)ye&D#=k)xMS;${A~o{Y=OX6SGGZmaxmsy`VXawu<2X6Wnk zaq*S4jO=luqSC%we7U)Fygw7~%tpR)>aX5A+tYjG@PWylj^s=vH8aqy{n;}RsZ6ii zws3i5w+rv>Kdk)$@6jKQ9R7%V;y}7T>icu&r1Ma6lOXzH6_K7{p+@Gk9j5xLxR}y|6@be=(G({S=K3;-?58%0W_Z{DdK`K7!5c{b-3cJ$*;m7w8p+m%1NTB19P@+oG6f()m z5yZ2g(8k`nXDG&nJB*k~0D{4)aZ(f(53>g}{wO|#wUb;`72l^GWOqIIsZTvvdE!Cl zIQK06ox6;GgeSSu0^jv7*y}l-a96{y4W=sXrLQnX;S5S_r@=sn9l;Si7Z!_5VU8U8 zN@enBWs<%A9R6V}w)|@;j$FDkNSr-Q9hG3$!6|G3-V>0RjS`A`!FF*5(FwRyjnMtT zpOn4o7AWL;!v$_6EfoH@Ri8psDJDa1ZYSxtJgPKB(a@!6F$zOF4py{yb?;k$^y4S? 
zU-^}?;EOrJZC9RRSI<_ySNYz#bMb)E7Ioaq@7Qhp9(*3RHMp9Tg2f745HxTC_dL=l zd6Jc1CX3u72A5TO1=fu2A-9CZ0zzE0|3-{Cfe@T4; zj^HeT(b*U?BC@W&)ISY|*8vl+)6HRmXF#HAAld;1S)y2fp&R2`MfU@8VApjajnn2LHVZh3N%N72%p$#w9Z0SE9*c8C<5~n>v~z@E zRy#*TXE6>-|5ZU}_1zyZKbH+L1V!=_#Q_q)5f1u0-KaEc=Y};M23|LXWPjRi37Pk5 z#S}0B79cP}(mV>JD0BpgE+g2>QT{x|q|;tHs(TrLWUuaJpk^<(Kvwi~m`!-+w2)48 zh67^`Ti}_dfSFWFoU|B0AEF)X#T%S2(b)z-^f1u0@=;t*jHS`@1(5*3PPF%OS_Q*G zY$IT9g>TI)Mavpxwy> z7=!Wys4|iabAt?XgMJ!`S8(&AUzvA0;PUNucK1dd9cC&uLDyq7`d`ns*~9*%@`cwH z_Dn<~6T1s9{_nB%bK{Hh^Y?x@GJeBQ4nZDwOcx51|A6~N)JDbpVJTjY(AMI9;!FUf zf0Rxlrlm3-i4Kz)@;;ZDhj`Q)oPdx`Au*|86J{m8;T}kSc3f>X)5qR*4~SW@EBL|k z2k(LhaJ_GIAmr(E_Bz;4mUHZj!IIbC<#IbR?h3I1(14n^2bziD7l@Cg+gW2ueE&&y zRnIaPHF%u?+;4PO0FhkX0t3^~kMtUPp7bZezU-?!w*)jSKaBh>l$g3Fnc34r~lNh1Ljj4J5w8LgbfVxt^@=2j6D6Y5yYo~($XHML)KXW{D z@Z|0HoVqou99ObOPqB0CaOKa~4FC90<<0Eo%17w~^KZL)AMsJ#c2t!VlFuHMbRULBsci>sHw7avxm8Ex3;(Bjb(M^9Aj81ao1CybAr7%vO{(*r$Zh@yQE z{eWm6MC(B>x0o&kA*iJ6;+O9*$`IAn6q}JwO);}kFHuXgqRC$POIH-Azbz1>v27KIz0PdRZ#&lQU^iY-^PLEn% z+HB@I&^YmK(Aj#>8TFK06gNf$1M&E=>Wu-c(TxG$DK16rwxAF*x-Gz9)Z2m+p1*p# zP;?_iNV+XBLD1nzCvdR%q1Ba69yo>QdARsVwHJO?)n;;Ww?8-6+dG%@lV+Bl%lUSn zVZ~%PoTLx%(C@t}y*}sn=hmkyk80mts(b;*>AT6kz9hSl%w$Lw5_x9`rbPyem;72d zvE;xA7shA z7qI+ucx+cbf8$vAu8DkM;``uVxXBT#Bw~bvocAyomcUG5Q{#6Ex=ixiX3Xn$yMyik zOmyjJheg~hG%-&v%`l3{W4PH62mw_-b9y+tX~^#%+LV2gT{?Zb^0z~M*=!$R_$gVz zhBBKA0iYnqZv3~Q>|nM)>xcFsPXiXZS4R-<3rKj%9a%TED+Nr0(#rsi1Ihzs)w!K` zp##^eg>X@Kgr)7@A6=kp7M4ta60^3%xJuA!k_V-b0=@((Xe(a5;JpAI_cNeCoSfTs2O+yHV1qLFOe_$7XG(B9H#Ey@F);*y0ltjHZROuSb zANr?Rx&YuP3y=!`B9e$A)gd>u_#tcmO$wq44MpxyoM+?Hmh- zkd+8{9q=V|aGKp9s#hYIMH8xUoEe-0{ATwfrltA^{EGNLXEzX5_J0Pki%_fS*m03d z5wQ#T(tYfmOHY9U_eZ*|{s4#OYt9sC&FSt)IoarP?oVX2 z{gjed{~tN8Pzp{Q=lG2xwm!;bA;n-)sAUT>B6QhM|^S^WaihL^3qG zWANnQj#4Nzx}Clm4V@ex9321F!1(w8`w7xg!|T_F$8N~|D7SNzz8&3}8=hf*Q7%?K zJ~RBIVwvzwdMU~C0lHUEZ!o)iAzmOJWRK-6bEHHS2#0Ab&+S@#? 
zw$64~z#IwUzuZuah-bnqGF3S5MUuxSCTGr_s4N{j^Z3uR#bWk>=UjVc_T9St!skB6 zzSN&g_I;pc59{Bm3D*c2$zgK3bj zP|l-v7mpc+nu08UL2JZ113N5ceFmP8KoxN{-K#Mrp+>i^Pf74Nif`M6EC`t=ADEPF zm+l5>DK?|gXfq8jmW*~fGPo%eIr%MXm%XL6m9h@L1>#D3`<)(_%jM{4f9qS}!@{>7 zXVzQ!JMcN&0S95P{4Px3bPK>P2X+Sy7`{Pq^#Xi;3sArw=*(ygA>lUSakvnM>hN?B zzb>dDdBn1j)CLbdN)kX`a3HQcs`Bh#-+IqIw=N#L`|e|le=~*{=gL<{bGgye3%|Q? z>X~Ow;mhd{$x?o_^4`%xVU!&k&65trc?#!YUjYw$9ely8jBS!6d`3PRt`q8YnF_9t}~d0S6C=gHd3URYI8v4oH~00-qP$19R^`&;knW&+4kJ|=j7zM$`1A^ zOpZ$p`@RSJ?jb3&h38dNjR+^WVjS4PT@qwPz$+dJ$P$(Wr=2hcQv!1OYAB2tj^XYT zrw*iV?>lhv#NBru&fK0kguK+2|AT*(Use9`3jSfg$o>?<+$38@CB&*jQHGCRz5q8j zbfWXnDk1c$iZa=Du`CEmMJ2&LVWh+}Zxpnu?9_fih61T)qDFrwZ zwgS4-P=m>2ge4IsKspa=+c)~Suj}0BFY{eu6_6_mz<3})!rbi_axZ-Ni zqXKn$REyo&VRyB-dNe%>sN+RoqX~{en4spDe((6*_uN|f7uMQ8IN1LdIr;3Z`}Qu} zmFnq9UBXtTG4#Lq~PCvLv0{ z$`07dgr1zi`q8r+T4HJqdjQtGx;+5fQnenF^`Acr1_ILow+AHRp#Uc6c6bu1Jpdw` zsBj@-mAEwk;+wDrXyN#8-O-hqPRt)TxFa#0>DsZ#g0Vm_9!D~Poc!OjN&N1>j(^#4 zu&aMMx%sCc-TULcz43pE_4dYqFCoh&A*dd_+y>_#xeY1)4k{IR19ny7T?#l!ahC>4 zu_1KcNO~bGJB@$mq2hj2`|oJCS>UD(uz=aznMn}4FNi>@6#_78Id*obLH>Q$okza> zC)t7tVS&uL{K8!%{USer{BDZQCt4tNbdxU+zeP9%%m-%NbpRDoDeMQTb+;SNGNhIx zMn-B$MlDc*NJGb&qo58)6e4j#<67^H-?A7O-jaQcy$RNXA4f9zTsDH!aBLrpC_WN? 
z76y9LU{l0XUWC6j3s|<6H(j>{67$WX>lid$axNH@q&<+nX4P51H6oddAb8 zH*P3QB5kfbnV9VJ-ngYZkO)T-$6~=yILKpyL|qv_>;MsIKsqX-X^>c>m=xJ`0cZzD zMxC^(r5!f>1uQ2|e&;dL$RJuws z@2t&CH$l%VlwjFFum~*c{3Bk9-*9o_%fHxgX2b3`jQ`m$=g!XUV%FE)`>uE0`?_}v2s6}jYFwF%#{8Vr^ri1G7VxGTZg)6)~Ts0YbcTdKgkF@Ah{k3g4q!<)z#{rW>I&W>si!Sg8c>Gr?-B$ z`51t1)iFpmou#^u?EN|%u&W`Uf+7WQIv{EgG{Og47aJnWbj=`&v5n+plz7qNL+G4O zakvmq3i=_qFs2$+iZPKJ2+ygobgTN7s?jktpyeD76I_ zEAC<#d<@`Hl*0&4>w_>{^@>yAYS8KAE(?--9^y7ByR<`G;N3u(tejCjg&0-Fm8eor z7cswwoBRoeH9T2oHR3WbPv0b~(Fc}32#e^`a7yQv;3CDtz@wO(WUeN2WAj&=yZ%qt zHFvTy&z@O+0dsTNUaQ+gwXT8^DzAb;K$n3HpLeSP@uDNnQIu~|U8EF)fJa@91l2UP z!Jhz~UHlhnC`rW*)TZPPg$&CsURd{o zu$6DcMs-7SKP9Z?5rB$96^N;aeMq%ebCyhIY|{9sKSA?HsG_?c1;cT#ERc%}sw$UU z<5A?FbW2^pE6M>t%`g{%!x9YZYSJC80{s@as3lfnarq)pT!sXCj~SUGF_B?+3M?CM z8y}D@m4808>+r5a@|nuso-GrJ+tNLU5^1dIH$0ah$=7s7tSQ5!y@T(S@kD8m-SWAcHjiSjo~sOO9|;~0`$hxNKf-o_FK>dLk%E79j(tZyBuQUp zWmQ&qIT{zZL=Wo`8W(Out*)~cBR0prAn&V={qj6M7`vDC3XhLAHh2Ich`@=db}`~> zC=R=!8ZnNdsu4m4#Sh$ z2P!?ZQP>rN8N}emJtYxV4Hk+TNA1K)<81HQ_L=Rebpzc?+w$q;62gnl{dvcFe_>O$ z@@*Dd=ucO^B_F1#F+cE5^T#Mo0d-Gh6qhpc`N>Rm5^W^3`GMxD0Xu|zOW0G=fW(3@ zjyg#6$bB%KMv$^TDn$Y$4XuFeS1N*jSp%OzS_k;N$OUdi3@PfvQ3WK*FYK{U0J;fL zcP7*N9Vob=7Db)lgifAgo;(qw5_EVHYdX2gL+DxQa<;YgrxP)s*BN$&sKL@LBY%1RPUL+)e6C_%yqmnMwD)r5 znX4aT_sRbb`N<`XBM-UNOll{|RszX(%*=^zlG2VlKrV=sCaS>&hA`><&$>EfN`NV# z4MrUI`!s`{ffsg3Ri0UrH!pt{uC=$f`RuJIwPW|S%Ku(@_xZ|`@~4--^Bs9l%Z;ELDoC;4h;bs7DSC z0xL3)k^6UfH5o5_dP~cnl{Z)JsDi{U$^U-ki|pumcH5hNWkGlx!KBzN{{ci_`3hi~ zQepDK=3sX@TVT4c!B89b8;Q6)jwCmUroa-u09^VjA*$SpkP2Sv(}kFF4P&eZUkQg1 zJ65@0U`=*%2!Xfmw#unxI7$TO*n>4_`_=My-mJ`B`63n#SQoE;O!`SJthAf3&s2Bw zWy6Z#NWg04@g5}=g$|BTN_x4nC~sZ@J{S4cvP%N1%A-r{5q2A)biIm6Il~cK*{=d? 
z_|0bk>yLC;{n7!k-*uREz_ATsPC8-+wM!C5Za4p)&?|lk1k*zt!=A1#x4X;Vg_@-r zrY$o6|tNW>lzxT2)R_2(m&1Y+|+gohD zw#rYJ|FiyQdP2VOYp?t2D;_XGO@p=|Bk19L<%PN*Ux^2ZmIW+7kaw%F#9ZwJOT$_A zB#O~Q{qgoB*bgfIjFUrv*nMrRyE31<`>OC+tJT&Zry@&qATX|D+x$+cp+>R*I5cKWj$EUZr z=>W)LGNr#FD+5*pneDV#D4C8zb$NYsCF(#Hiz7-WvcR6G+{HYVf2sVhC1v``o^$eZ zSH4QwbO841Rc#K(EYc2zgueDnE1D`{jCEHZXNQ$(jA33DW6-X`;sUYfg4qhy1?-Yr zDNt-zMp_g~WKb%ta>RyM9fPtnnUq;FolN(pdSZU0l(-x%a38XSoDjW@LAy_3jxPWg zJ%}!QnWHBXOhQt1Zz>s&`no+XIIXNGuL7-tinF@7=F_Q}Z%GG&z6XNgNkBrsD-t5N z-L#O%o;;aNEZlVd^tScm6LY=s(;TTsZhU7teXcM4-d%^!ZF=wMER2{NM&A=loV&7H z`NJ!)Oaey_VlSWM`HD9!{~5QAYNb(+^msx8-E*aD6O0Ki6e`@6?SOT{6oGLeaeyOaJT@!b}b5WpGX_Y?V! zJg~zhNPkR(f}JFD;y}rKm|0q=gji`wn!_<6(@K}<8)F1ZG19H$&@ygG$#D(fDl!G3 z1afm6>(?B@{OioVNqMS(=joFzJdR2d8I&arhsSs2E8iKqaXcco42@3~ey}Hp&)>(1 zVHFwY|L5R`b{?~W3-ls6&W#V#Rf@zRBt?S1Qc@(v87hXIcu6;ff?$oTpdgXPj1Q$E zv%?SFC%-cxt-rZR0WyW;BU1feY?iZM7#wI z9?Zpt^8Bc&rJ!J6s*Q+ld$Qpy*J3H$lmU13#@vjq~q*H|whW?8W!6uK%yL zF9D3Qy7s^Kn|&tBOlHfTnN0R&$i9#OLly!dK-d#Rk|7C6AS93kL=>e+713JvT5GLs zwRNiwh?M&Jto2#zd$m6QYW=s?T3?^_wQB8VeM-pi|DF4N-^^q|?d$so?%ex*-(Ai< z_uO;0bI$M5}Zh{^%e|f=ddxIwfxqQ!CKw2o`A| zZ^bm7=xB8KT|jZ6`YjR{g942$M8NWpAjc;qGmg)kcOX}QfK&0qp9BNxbiw)-ke52K z4^&i)JN~$HgN-0kQ;NY=zLic}kJvb)y`kzElQl?G-9$=A)ict<)Qr5FU-lL*uBuvG z=$+8%9kwMkZ>xRf?V2SvyIwzWs=Dai%ErdZcZ;e|0d>$$$)o~|YqTCO3gpMjl!D`R zDAvVi(ha4~r$p-|C0R9SHyB64{p1F&=t&@rD~8t|xG!uxG`&kpr}IE7S)6)t`qut_ z{&-SOf)#%WIXKvdECT2R0{S@B$9&3~XoPa^3k>jdezC?@<;BVjg0;{Nk89C(r^Z=} zP&KdRzYU*T@!uaW=Z0_^FOarQj%%dwUoQ*)4J!X8c@veDfIZ~p*sHhGt_IWu12QrV zfo`DDpa&odvA$Bs+vCsf%{CY_igOhcN|0p`QeG(6$KT1$%J6R7e)qw4sc_Zze1+o! 
zo6g~o;<1TwL5GRJ)<)2wop_=JY7poGE9S63phiN^iBJ^v3k`JwMfo8ivs%_e;Z*O@ zX?&W*CpTj=43lMy$hsRZd*zOO{PdpiU5>m2Yht1;A=e=#Pv`N|$VUSS^?Rhn{9~-5 zD%euCBCyN}6~quYZS(fff%38UkHM26;fd{8ueOT#A zPmz|DWoNXKooxKjbK3&oAS%-kkN!lGBG3{0c`$dddh|kYB48ad4It}oG&c>yTw3dL z zlMHd%I7@Nfo!=g;Z{)73#>VOvOi(H{bfbYTTDI=on`v{VrDUWm&AD~3=J*=owRLP* z@<_!v1HXawW2JNhX2fIchw>TpG7=co>YyA8dQUJn#5FCm1v3jXW)@s5fIhW!7(xhwVT8y0kz$WdwbHhbboCDlHF>vLC0aYI)Us~$M6;dC=>R%ed#V|0yI0r zJ`Fks=JObs(HQ&fu&GY>6~IzJ__>O%ZDJ`g0Fuy6|(o;pceGmfLLJ1ZzTKB9sx_)_I%FgNFq10N6^;pT$Js zEk=6XA-zzUpO>3r#T^e#D#j@6MWbIq3}Ft*_zV584oDYynSR>An(!qB; zp!>Cf)g{^Bx^{UxU%5OEG}CQK+9Xgek)t~ZRm$Q5xrVLvdO7o!dP|{Q4)Xg+SYR>_ z_n5!|XtowJ5_xU38H6qZy~-N97^g6N)98d*n_{3)nB_Yf=de}hE$nqv$CWlL9}rKt z{osf*=e6`Me0^bWOI~hEZ^Kx4Iw>LE(OBWDFLd#5zMPa8=WxU&CcO;wW1ju}^HaP< z%(GWbza{3`tDX_-IGQeFcMBsr+3YdW0XTN$#Rhdon#qvkO)JPKfQCn>HQwMb;WoNi zn4+dUzks>>W|8OWuwCT2;swTjtIplA1R!;>P!@p`k%;^W=I)AdE3pQ{HF5yZfVn#j zA0iwxcNbZpXZ=s|z})>aB$vxY*O|Qt%h2$~g*8=W{-T0xT*ObIC9K^HjiQ=_I9kan z^tZ)?jaEwoV3_UWBLXS)u z+a>Ff$;8A#!xG{x(8vU9QG;=2$!O#~;`;O=vLBL5L1u&=o0$u! 
z8Y#OAQN=QeGZf2sm7_qc1xQvEZt7#9U?!qtCaPveQ8e@TZ9Dy@ye|C( zV>ileTWZGo7Gyn+l^-@CiM-=57tr@$jDXU#L>z6$wrjwz#Ak;;Q-bb5egS0C+FDzh z7B@C5tgl;8l2=(!Ihzcvazp8-j)8f3Fb8a4OR!9A!y zT37~HlGCoYB{AC|8o;1~y!gUZc{$mh%tV}2NIXG0#{8krkabKZ-GOZ#9m|)uv;+c;jU8(`)~sH&eC6_$?JHW^TH2Pi2ATuSO-mb> zG%i`Zh^kC_p(_;yjX2=Nrdlp6qI+dMscM_cIs<4ovXmM!ZOn6#=Q_BLXZdTjKSTWR5Aef(jnvYQ@N>?MA8vkP z)9IMMO;2nh{YL8FrY9nQSalKIiFPe!r2ny&otE3QCNnJ~CEXooU`{B@)5a&T_(8NK zl!`4}g+{YLidVvpCvL$}JL2Z6CLy6A0j*dizm(%p3w}aIk!0NgEP~3)N=m9~tI#NA zm1Ss_k_9E$)A$VSl6Ap$nZG?^+hO|6ptpBS!TT>2KNj!5RPer5I^_MOm;Vq54v)>k zatwwQ?27U5+*9X2WWSer+tiMojg(V6+^$NeEEgjr=fvUNWX$?qyk^10zEPOA*&SM`N2We{b(r*d*Mrp|i6CleqrD{=ppsy?woX+k;zs zw)Skhxa*>>i?(!c*t}u$rj4EJJJ+vUEB9+u9{qYbdUXr>bO1eCaIw$SrD^FX{u+sL zRKa0%X0OtrXZtZi#pu*trDMSp%#k8g?+8rLY4Ha8GUd*Nw?_~r)I8cMz4 zY45qq=vUK5zx?+^n{DckVe>31Rars|wa&v6h&j-_V*=yy-C}GiRCE!p{syCU4sCvy zld@l0=fFfa%q~FtvOx&@V%|mci?dAZl+wwW5{}HaD&&M%&wTcjn0JWM=1W9nXh>vv z_O7TqG2UNNT$n4^xyv4(nwXm7lV_FM@EVM}zeoRZi9OLuoGov5V->+W;y7jt8l!=v z5^Rpukw{@2flN=GB>**w9)~T#%WYnTz>bdRnRzrZ7p9DsypHJho5C zGS=+9^lI8n$A$=ZdSS3UAB)x)0g><Cali%3&Pq$RI;}W?ic5^cEe<;M ziBW%PCGiUE`RFhm(xI-T4p2kO#cG@t#8h|s#pxHdbyC^KcSvQ^FY1S;Uwq27)nyf8 zF$Er&y5{F6>F)?B)Z;(CHvA2~?b_+xYNa$&Z(>jPISt)7$!2*B(&#jP(=ik-xa7>GB8<+SfQNkzZN^$xmiqes+p%Mvq><2;%cc&9v3?n_bPw8e+5cs*b<)+h z9JBfUt*f!I_}~NIxb=ps58xoiD8YuQrsMf%fAr*;?;gH#d~aZ3eML#Jl?*}Ah6avJ z;i1`S?JQP6X!6jbcYS5&@TQG9zAU|oR233FZ#dt}8^v6sU0Fx9V;u#2#;&8F;2FGG zUWqSZ*h1x;R-HBnbm1nEc?Dd1)`IRtrjoet5huM284D-F%};bGI@!e`SG%A>+HqIJ zyvR{7Q z<>mySD-HYwViTWKY^<0R9Zxz%vDxxTqiD=Gs7T=!d~Wvn;qnQp%GBNed*@J7ck_#n zT%vvJ?4QeYOU`~N<{S!ph}cb*V0QxNHAhjdjb!_ktu-6A)_mej=O^sH_!s!YwyPM) zu>U!LAI9*T9Dv@(nJfUTj1qBnwVSNMB4&uZ?iVL#1xE84S!U#`->Ww6L zYwE$#=^`ObuftG@lS-Iel7$3oV$w5QW|x!G0pzEpCW+-L7RTbHxOmb+q(Y&l#n^O# zk^>cLGGGSRnqiVNCi(n{=!hqS^WuUWFPX}amw=pXvyer-q(l=GS4MF4S_D^S!WFwq zkvc^&(5EbLC8j{Mn;=kxJx|#DQ}IGI%b7neDis?ykWD!tFU(1(5^CcO1 zr{6fgU&=oJU!YZ$qUX>l%^RlQ(6&r{KV>iFF?AUEGI39kG%eCS!N?WCAspJ`avW{; 
zfFK%-t0M=sHmjLt`IwetdX6c+d{9i&_^HFvs~gU}s@d0ns9(BaI)8(v^xU<$R6R1ilLn^7&&%Qb{YoEby>nhuO%Bg<|jQ+F^ej;nJh zH7a46#eMnSOb$w{i{S@n*B&QfamboM!fm zYTaB3I0)#?k&5e_EhfjWn)>-FGSp!apD|E_ zp`%i>XJ)gWbt^lo&9~g2nC-Asl*VJU!O9Qg^F#2m*P zm^9~NhXHmSobgMS;(SGcn|~nf#oh8!x+|BPj#(VS41L2VM~AAl;&R zgUzJfcuv!O^M8aN{q?O6P){)7e=@2l?H!+tvm-7h(5R>uM+bHbYLw6OXY>l<4d|ny zPHr}E+D%%sNGP22Z0F!B$GuRfwAbyWpb;eZg4rRYj;&L-pK34>}}()w)zG6`eYz zyDvVIAU}JtZlzqY+b0NEQi3kmLhj!x%r2}f(Z93eAjXS>P;3ccYo1s?@-`=#cNoq6>8e)xy5KUx)P=y(VX3U>zn^^ zOV>G_c0;_=Xy)A}izDNHQr`Yhg4<-_%QXg*G1;m>I$?D~Y*Ayd*mHhG+T7nrN;O%) zn{H6>?Z8?Hnq{(55;C7;r?4TYp;=!I(11`NPDe}>ZwaBd~YOyDJ2xxf2ESj+X~W`%J7Q*OT4r( zP@kP@h1~QUu0$I%bGTBD&+Absn#Nqge#AiUQ=rb*5r*m7iGuNSzXq7a=SnLoG%rv6 ztm|e0yw@5&k&rCdkAiUOvP+jvOuS*wOVC(z9k&p2W00-D{WtJZ==dWGt?7<9%+PoT zj<>PUBUS$3DvFk^vQe>;vc?b-HhV3`yE(m()aN`PZx5(BG`l=#OyPme#Y zdEs2eX#gAn^hr=g%%imT4-@YTDu@9z_L*p#PrnU*DZ`CDlQ?il-a(QsF%aVs@Zya` z(l0WYWb9xbFvFKVF8z4Aeg;@`;Gl9fQ}?mO$Z-?S8d}l8|7~R`A1p3wLo1|aRyIO@ zW@8H-dUdQ{S6!v$?VC3>H)-`N(qdwm=ZyiWT*?S?B#?>K84hEZ!!=X=VJvq^O)n&6 zv4qA8sT-c8$I&P>Ev+SkdTIjLG&Y!}*o2r*;%e}G=^zN!u&{iA&zEcW;RiCKRRp5i%ikaK(E4%h8_F? 
z6q}P~LjNL~rST$CFe#T2`Vmkx1}79Q#_D+r$G|sZZi3mU=HA)RoZG7V=FG|YZ{~o< zduPl=lrcRr-Z@w}~5Xc93u@wqGFK3a$DyK+D53 zP5;EYg=QMc>@ausylU!{=Jks5bGh2e8S}&EwSRkaCiL92bm=4}hk1#bggpBj6Q^cR zq%>2fVr-vS=!8lu>A}%(jZOrxc|@mxfFAcWa26%Z9zn7PTZ-7KG%CBkg&eZ5*!krj zgo~zs!e5ntWveK()uPPT!r+@W2Hjt%>YSI#i^F(`U9qG%=u!DhQ+1ncMmsWFl8eSdG=d4VpA=+LCD81UJisCTexeobAhFs9qjA)gg4IU(D66C^#gomG4Z$JqNox zE4Rj9B9EX2b^hYIA|FQ0!a5pDGcaoQw=XWPp^-GZpx*DVEA-+k;`wU}5TdkxF}iB# zVw$ohrS3Tlnqy$CMoG) z5fQ?ppCO@29Vknr;YNr7Xy&AapkfqDvCS1&Zhgn|zikY=v5uwCnr;qs3jSBzlVSfym3&5bEu-{D9>}4ill#HJT z{XpnTEc@ZVCq2}kDoNuc7I&DLVPMP* z3Fs&Y686F4WHvP)1R%uBi!bWh6vJ+6OG})Dar{{YvypWj&gL=E=URD<9$9kbN2l#G zd5-)(uX^gnK9|##>8q0YhbyPBu0)Wf+gDIqBKU_RBP&1S&d35DHq11dEVoBxFHuUC zSyO!hCj6fK>JnJ=y)nm`9BIr*AT>MN^pnV9AM65?%It#dNHa$Q%Sam*pMGCy??$@w zMw&5TZ<`x)=|}2mL<~YAb3e~@6kea;C0jmQ6_akwMb$+@^F%yup=pLzl3i?6|!=vtEe&d$Q1ur?p(2TTh_%Q}k+Ao9O*c_GC9VQ3k9 zN|;k0#_2E47p?#|hj=msZE$=fZbZR-bcr?2%q69e`i5f41u(ltRx|S< zq6(wZZrqTU_s!*7OZmCtG9Sd@vpAX7(Hq>5UL{MMxz*Eq!&A{UB)_Y4sG`2Dth8fW zPxor+5YOMzDA~gobDbPVoUsNM^%Yev^p|#S?H%mW9Foc|E}H(Mv}N)rnSqlRl@BGk zs)}-|5?raHYghJJt(~3mdQVpP!$U9a`#i;>d;bQ*OIaDE`Q8e|+OxiMr!}Rs)2(wS zy>&>8onZ~^B1^zyfUOF&3p}C+Ky(i0O#u_wa6=t#^Z|#!ATBQ)Mi;4 z+0KkYx4@xtbfT`y6(r8aq3?n?3v2i{=Rw-mj%~dHXPv#;E2Tz%MFUWDaZeOQ{Mbu# zaMYe2gQEEU&O-lu7!m#ZEa>X2z{k448t7Zm@MP>VrQiZl8txJ_)~5myK!;)wjy6=W zk9h@_3z#{OL2!Nyo z5l$>tHuh_NP+VSBGy@5riB6Ie20Lk==|RYsgrz}T6wSb}yDZR*+p!r?7{ghP!;X3{ z@WYT&DstA2~>s63`5Lce5laTa-_EtVuG~wuf{^!J7Z%w>3TPEVrt8?TPn1p-GrIJ^v-;zKh zs)j|m!;Xanszs|1y3HnA?4acZc*!94AE=38sAE>OG!bQ1zF2uEunQ81Nw>`Pn#FasWkrPvd@MEs5Kmdzo_NVxSX`VG#7$_Dy7_dvdpFzQOi3|o6AMa9 z9Xob7N=pk8HRj|Lry+Z1w%!Eg!lu*=kF`6{Z7nRyPfW}&DijZ%%v5~Tne^F)IdVMR zGf+-f^f8&3ewWot%?jQjbwDW3NOu}Bg8<42-An50%8QH4bJ6X~&LnheYX5G0=g&pA zGjDrR9wAp7Z#DwC2mrbHNx}SJQgJbm8|U>Z$hF2}NgZz`wZl8zm z{p0la@AFU_I&3%{4|1b-)_F%6A!US)`q8mOupUmDTd3}v^(>-$h{s*EM5={Uo5K2pMvbPkM@pt&=f`H! 
z(!WYCJrMqD3@@Y&{gwF(v2GX(c_oCr{yD4Zrd+5`f!lXAG{{Kf0IRN5{PXNNj!M;J1 z|39JlL@dk(fL;B+-76qFN#c`rPe1+ikDmU~Gyn3`lTVyEb@Gvi@Bh}_U%l(jJ8r-6 zhHI|A{IbdMkzG57xA$({vT5VmHF?R^*_n>qOj{;Y_|wW$UaVrR4y(k_DCBk-;JNeY z0dhq+rjQq@h=~nhjU!W{yUjJVLY)P6kY&q;Vl^u(CaS~*3mmxGRsBS``CW^|*xav% z+EG1XLg2+m`a9gh5OO8ugtw$s07-E_pcYeoES-hf2b7r?$x2pr6j6JGIkfX^Uf6KCMzTC!cXBv)ExesY@>Slivb zwr+PmHk@mh9m?2JInh{K)Z|WcIMUorw`Nz{VS&wMugEB4;I0aVDr;g2)Zm_RLG~+%7?j+p6zQD(&9Gp7(*nqr6n!&Ke zC24TxJB%%40N+n5@E~QgNSHQLKlT1 zDr6!Le_Gt`Kq8{R6t6Ga=J4UlcY1NIqRBxh{=gYCj$IY83ha{GHZ8Aet0~V)&o=XO z;ib-|;;Po#imY_+yG>1NimQurGP!j!Jrf~sy4V{HC!#)o5%n26jl)xH6e;zAkS$Gu zpvwbLi)|xV!li8zJuIluT3Q#REjU!CfMP3PUSNwELd}#fBLWiE!2ur=iAz z!g7d@-itRm(T^d(*QjYFn#{n;lqoAGPw#9MoVqO7?D$3_yxd z3gXDN2`aK>8m(d)vleofhd(JX)0(w2(gqqx2%DP>*tfi3awt84R1IcYvr=R5$bvol z=D=dG`*MFter{Td3zNHei(bnX@`dVkI$%soY}e+?r>2Lx3o$sjg$^b{p*BJfWX)42 z*g0bPvcr4JI+j$Y=HhB`YhU}sRXseeVZo6jYcn9wCrK-t>)NYJYdaP$Xh$S>TK^j9 zZ<(I<_Qm|i*v75jG_a|DVmQ32x3c!p&PO|O_!?i9>S$~h9}pqcy)dx2{hH3Q%E9G> zAQSZcGIl%vn)DGg2f4|*oCRA5k);RrnLt($xQ7@3>5mkGj*zsMl>jF}ctvQxl%?5b zs95pQow{&%_y+!9IBlH7b-*n#S=aA7$dB{iN*{eZ`7;adrp4n*Q;Owhf15zhg=_)z z6c0fUt+1Cub0!Bi{E&=xUXX*hhe4LIQ^0eCK#lq_BFZzvh6$bFL-QH##@WJKr7z;{ zn{9&cgi7BXjGxOszaP3qsjQ{)xj-U#8}ehCe)JomrZ<^(Q9h5?kR?%6eTfL2vr9o` zQHs}jE?b&7BuyUQt1+DWn63k$PPd^>_s*;nfTtZiPNTtL}}3Q z5Qm%;8!)I7DOd(ev4w9tB;7ncx>3^{zI`HmCzHSr)cQu9cfmMWxBIee$&` zQD6pUnz1F)?JbCg3YkC+3Ux$fXOzN8{!cBODjW_Rk{+L4IsLOMR2YP2yr_FrsyiY-iO;HN?JWBqq)?9}50rljCKPP^5f?GvMOqmj z6TpWB*858rgp(F^yQj zR4kpIZilk)_UWG;0;U9fiVkVb^wZ~Wl1y0B*0QC7EBZOk6u}h>0PG58N`m|WR!#;S zJHmD@WG`$G#T-m=B|EgtZB5WXtxnt^skt><2I#y;}@}bX?uaZ-DpsJ~<4zd*<>L{=z_M@xza-o_z92M(uMme;MsVbEACru-q_Y(uo>m z&;p4TPCoW*ay>o|W>qw)irxalY*YPn&qX=eHlR*piy*gJ4gsw(*Z%WmeURZhRrGy3 z{BI)o-Hm595#o3bX@}&wgr|*X@Q1#p2`|8T*W$T&f*=+qpfa9Ju;Glo3o}L@2Zy!H zK8P}C1)hvX;yk!Nu?Y9|#1(GBlaiJ`08d7f#wg?I0{JK@|9(8_WN|qgNnp-0&qi0U zY~mt3bfSb8kjhNnuONoBAgEeV!oTEI-ta?GD)bqQLgVv9_;o_nrZXD}OZa+FMikN0 
zRs6S_RiGu(W^!vFF@+?-I$(=RYER{c6*7Sp+7jceS_{-Q>kSq?h6+-Hkr0o4FjYek zR2LWw4P^E_pH8{y8?OEj&6iGz*&WC&DPb(CvIH>1Xp@mCCy8!^#dcIk88{O(q{Ytd z#F48?JHBcXo-`0vd@;n6?z-p7(zf!n9b2xt+Zq0riYfjL-V*-R<)rK(P9~!9d%Txf z7PNL({$St>&MUINHW8&J!qJqJ3LOCTY!O?@w?7Wi4>5%&wGPebV+lsnIC!2BO6i8} zsJNR{yEqy4%`6s$kQT0TKx06Z4`Np=vBzbT5)&H|Y5qv;wS}@`rUZy_{6l$>NJ*2L z7xii?8n+>bJ~iK&Pz9F#k20}wRB9Enas`>o)!>2%V|M@z-QLz5AcZ49;|w)I1+XTT znUP$fu!`h4oT73unT(vP6M{4``XK#Jnnr}`pe3YfXr^k28NB2Q^Ob{n5mgb=n#zR@ zmE~0p+VE7g=j^=Bd!3XMRTiOgnTo6BfI`9kiU*`qZ9tz2_pYU?zFeU(bE5p0q5j z`##X3_7!lE7x(g|#BEVs$d>NJ-1R>+E1;RPhTR-U$ij(m5jV$69hqFyF4w;_5qdho zZ%y$GJH=vtpbSe^QU(y2AK<35m>v`p-O?u-jV_p0j@=zdk5x)VX9w!AW3`b~UF=97 zsk)+mR^cX@l3T?^Euv5Dj2VIm7h}2Fs>2*V&DI?&OAt}Zts1Gl|eP*FU zYC&m1WnpDrjwf?o1#UEk=gU1!&{A~zBqJ4C%p+TSc47Hd-cST-86Hwu?<~CX_==*U z3UXUzuOe&!TPE(S_B@bV8htA$FXs=#D=RA~stBJJ<2>*Qy)@See0p(fppv~QokSQ$C5W}4KD-EC9W&=q`Vkn+dL8K(T4a^U&075hOxR!$oG0s%9&Dowa=APg{SjnL$Pf!V-~*owiFTI{Wg z3+!C8CO2nMLrqmlQBHYod9urv?6O+OAc?H>*bM=0E!n$LuM^<h4+iz|I=vqHM#wlyAAh;1y1EF?nrN{kl7lhF zAy->=L2+(Fdq-;ucTJb^FQ=~XmfaMHtIbH(7>pjSGm(@lZHmDT9NiJd7AuYwjkx9vp=!t+lIkbu zOb;R(sKqy#bU{c<2}KqG7%Kks6o^a7^ki?YSr49MN4+YGN{dR}z9gGF+mUafOSg2) zA@9*TeKLkf0FQKXKm=OsY6&Dli%E4ZCI+&y+Hg;b3(UH>bVXIhWNDtS*x|hKtCB68 zd*kjy3vxF4x9q6V{4$&y9?CB+%<$X!{ds|`{3ZUB)Uu|6?Be2Vr$6&~uAlt#olUv< z{asrcJHI$FvCh%3rLe@4Mq?k?XynguQtFplds`q!F5ctYsmVXz)05cvHC_a7bhiiZ%nCUx(UxcGPsJ>LMpTk`S_YT~ZaH((;69SNKCZ$$ z;EKM{d)(s_-$WdIr#Rt%m)+DohyzEkPoKxTm!z(9fGhkjcxQ+E797D$xF+{S#r+Ka zt#AZ4=~toplRpf1H(VXu+0S@+cxL?8TfTOk|I-&NY^oQZ-9pQ`|;oup#$KcdB z^e*`Vj_7LBEVmxdv5xRH+Yv4)|5&HiliG@4igmNo181VmRe$z(dM3J~I>tJpJC*i` z)`>pI5iPpmGT^e{9B@=;f?>8#btat5=TkobO$eM241}9c@>Muz`+)iUZ;_VpNOg?$ zDNK&wr?ymm%40r9_3wdGeZoEAX11^PbBd?7{cNB5KlNd1E5akS72!_x)$g-?qCuiH zqR+?SRC=R#DxDJj?ohrx1)p3WoCS_(oOlR56YNxPf|u$}@KWEWx>MVcQ|n9d>6zkE zdLtaYqcFLj!);Y?P54+1*9E7>SKEd70Pzv}M(HZy>foqs;%h#*c*Xa_uZAPsCBiW{ zg1HDzr5nQE190Dgdj#$_#oY^^-jg5up576TsVunjVfX}3KOEJaoJuFe8wfur>s$y< 
z3qJEbnb(9t>w+hJgzp49)h`*YU2)CuiDu~=;UgW6>Z;NeeW$v5;8ws~(okMhej6Ocr)M?ZFv3)K z!Z*dII0OgfN%5%sGB_%W+Mx!H-cvr5xB8v(r+ldH1RpuVFX6Ncj_Rd4DwFb{_yi~6 zgPzGzUE$6Hzvs^)OzJl@D}*TFh;$JvYtZzh##JS13ii{4DZ8-;0`G6Fnr`SbvyhUkgvj{r|w3W zX!s{^cWXneh8*IcZOBc(fiy@Rd-w$f^n`Q{gD1n{S$_CL%-o( z!=H?PPd|Ikt5*ycFuc*OCRv(tIh`K+tP zb;e!e?sV^W-|BuMxi|TR$nU`mVGJlh`HS6lEr?Wov_&wkE_Ii(I4`m!Nbt4>$FR~=U!s2-_)y2e^lSMy@+*4mfq3hFl19jSYzUSGeh{>J)0EzDlH zZsFAn&n$eeA+upa!_mf;#yySqHomebVNv6v@kLK9u2{T%@m-7mZHZ$^(~_H)JROJ& z)C6t_ytTAu>9(bZmp<22(X_ScXw%b8e`>C2?rJ{Rd{6VUEmBKkOMlC^T3%^QZ(ZGb zs`YovHY_{UwyEv%w&#~8FJHWT+wx<}A71|R<>yuutk|^T;EKCfJh$Rc?dk2y+sE7Q zZhx-*gO#3@Lo2UXd2;1*D}S@{!;XZG#T{K8dpb^aywLGhM|hQERo$v%tDaf){%XhS z!>gZJ{kt_uYZkA$XUz+p>7ApUr#jzUt6#fe?d00K)}^j%T=(qy4eO7u|6s$84R>yM zZ)5Vtts6s|CO18`S--hv^Onu`Z2nzWYS*^zt=-`*3%9&~(fC&5)|#y+FV4L9iEWwN zCbzxTQ_}NFZ(MIv?_I&1;Ev#p!IQyPw#RKB+J0>NgME$taRcUo(*r->k-VdBNAHds zcDyuLF?i?T^MmgUv*G2#w+=tCGihhp&i0)nJMY-}#LnOB((cOHwS3q1UH9*LW!L*7 zt`Yyp@{waB501REyKMLJ-9x+Y-~Gy{e$+pD*XXmO?~gT&Z6AAek9E(cJ$LT;`QEa< zqkGTneSdt<_{s6tCma)7CT^WLJ@MgWV6u1ehRHweTfA@QzT^9z+n>IF`Tm3ZL;K&k zWbq|CFM0Jq*@5u`uV316>9I>sU;4_W;e(!oEeCfTyzAgg2S2#Xby?uDotNEy*%Oz2 zba~C?mtX$up|V4FAA0`K*(-Xlc>T)sD@U$;`*7;v-op=mj(u*^=bkv?IkMx(gIDSC zZ}C-ouDbcEXO89^?Krys^Ot}A?A0AtAHMqetKYlEcumtazd4qEY~8V|k3DnkvTMI_ z+1683n;$E9?8Q?zp8D|d$>t=29N*;`+cBq(kp!JX!z06c_IOtB8{dbj zjh1*;j*U)C21h4{M?FpZMg~TAjqM+roZK_9aKVC0F1f_Nb6|42z?Q#nYZ05vK1RLF z-h0q({0ig^NZ*9Rhq<|^d6Y(SX^7Oafh~jL)lGn+6o*XZFi$%-y@#necp^D`5*!}X zaFXpno%g{X11<=Alc;L}ut5;l!Bt{QbZ%K{Nl{qm#)*YzShPwIc~6RV>IZ&yqvUb6 z3-87N#izI9+!8tk%+x}A@Xy0mqbB2UBlt&kDZ#syq6H>UF178Xcu)ADSOa*o3*Yx6 zAHu~RK)DcN01H4J^v@52YeZYy1_%%0BSbk&;FlMq7&A!V{;fB#O>__Yu>@_svFWKwtS3v9+*snnr2e1msfF-{X z_H*_hoUu39o45_1g=YB<`&ag3_6Ufk59II@kim28-!N)E&wk2Yz_|S)`#{50aJH#$$zhzhA3ie_4IrbiV zg8diU!9DEv?0t3<`#Sqi_8acy+1$r-crMRl(=5#Mc>yovMeIBd!Afw4l;qvZcsZ}Y zWwk1pF{puc%{ulG`yVfECcl^8$G^qD&F|+A@CW%r{9*nb{s@1RhoBSYB!7&b;*ayw z{JZ=N{~mvW{}cZ{|7ZRr{{erB|Bye;|Ajxpf5e~VKjzQzf93zif5M;VKjkm*f9Egq 
z|HEJ6FY{OU&-knS|MH*nU+~xXFZt{ISNskBYyKwx5B?kepZqQUTmClxFaA6Jd;Si8 zm%qpVz~AS8g|CpcUQ~VsC=3#ywN)9;AdZEWa(n)&B zAQ>f-WR@&a94r9CvY}*^Y*LbBhb*{La!GC}SxSMtPa1UR!D6$NC3z$-p?kakFe(vUPP?UZ&&BhqeZ zR2q}^NPDGmX+oNm_DTDtOK^g8sdP}fOuAe;BwZn0DIJzRCmoTll8#EBm##MK8y&_x ztxWM-6n|;z(4~8Z21a`nZ*Y7dI5{xx9~mADS|>(=6GJ`XSG-7_~1I3@T!Qp<`ok?Ik>$vT~@g6wwkb*eFWBoldJQ;$yue7cE#>e6}4(%Hq436*HJ%V|4{Jz*H z!^W|}vC)BDhJCWHZ|NH!3r-rA?hYa_Y6!}{e(CmctPu1;;Te`HiGs3kY3>^y@7uR~ z$H>3|OJ6i>ZtfqO4EABZJZbKW1W`l~S?l|SXJ}PQ!n~bI)VInd4G7QN8cAjzhy)F- zO8El{(5-Tg1H!YkMIo>ZM#JW|NCxJ?NYJu88pARa4eM7-4v+K?=!b=8SfM~UtoSPw z2!<61RtNj`O%CWsWpA|%&!}9(HArG{(d8jcAseobt1#E->3>DS5GjmzFTIoEO7 zTQ9$y5T0dy6mrW%G_2nsN8Tt`W}jS{jS4{f6o59$0PPc=ZsYjy=%8*N{aQ9hv$9+g z6Yh=)Um6YTx5z*p6rOoY1k2`wk)VF_;CK+EbcyV3mQ!3JJk#d>;eqjiiQx&;C2Ghl z;`#f669e(;JMm0t0eTR8RyHBJhYu6V+CZBTyuQw%E3iKgxMc z1V<(hQp?V0SU)LBqqr4iwPH+xUnM`+DbID~%CpG3qO4Bj z4ZkXG7dYm?=m_}TaG!oyrok054TAX(PG|;32Q{sugI2&m>@UIsT`@ya%$SlL;hItY z84U>*sc_`k+&y8>$i9gl`jHu_GIYd@CP{LV*kMQDsH$8ZL#xVLm1lWuttyvsSyiDt zR|q_mRh7$4TvaYNc~!aGmQ@wX`${E#rTVPoTcxD0lG~=LO8H);mbXkPUoA&&+o}o$ zj&dbkxlCGB6{;_jVpW9_zg(uNDuqO=%9L`dRbNe~z*V8bFOzmvg;K8y1)d5OKg#!V zK_6vR6$(5R>a$XAr4qkdsYj&(SCx`qrRppBRVwK#mHaA|@+%d%s?_gFJ*yOaR4VnT zRGzEUa@2TgdL_Rq1umt_R#mC(qQG6H!lTwhfxk@kmGa7z?`2APN*}JOR-WsX_N-Ua zDfOyW{4xcuYQ-;8;HXyoG6g@?ieIL{U#<9MO8u%;U%^kA;#Vv6s8;Y&raYHbNW(j& z744!k#&&~vF%Lrb6kW%m_SM)ZHZ@A52Pdo1$d8RG&*fsi%DF00W<*U8xzdk_qYL`M zc{S@hn_cpRYxcCuDRRYFx@%x^e0XqZQUO?gGcvGaG6G%_w)^@9X1ubisWJOf(>f^{ zauX3FfTXCvDqo}V(`1nL?i-kx#GdJl+Sq4)6R;T%4vddct&}-Vnl#255N)wg48#DX2iHw}cJ9%m;UShPPxp_2}J;FVsEYuZA zsRl&a+|hlzx6_XC;AqdhK@ltU!k=ceZ4`vEc~g8s5e{XpJOjEY3Zj^HD`SPcQi;Bg z5^5~@%8P<1T!}KSBD9etKgTACtQ(@KgIxPO@6rX_LCovr)yNF!th3STjCmojE6Q}p5Q?1W zN4!i?qR#N-=pv%>9t=jjlp9RWNPXv!+j=_~p$N&TBaboDGZ*oxxp;EZYo#Ei?jnz@ z*_DpgKL)~CFH@t>*q2c8CE{mCpJP7FK&3k-N-QYJ0+jx-k&#)Wl{MyrD8xQbB$tRJ zcw}s*^6(XzbK2%aNL3J^Y_r6sO`7pZl||UeS+n{+N?ooRY&PEKWrJp1(sl7ud~q0L<`?HJ(rdWOPS+4cX5}y)lgDzL;54&lo_zjg%7JH`e}R0w74M~KeukcL 
zJK{0&L#)ITVjH_!yFH#zLGKRF@r_*}DL42%BhGL7`h45dyxtJ&4zXrm%Slwcxwok_ z#7jb+-W{bOsl?~?c}qi@5>NkQ8i$iLHHYlYp5ET36OyC3=|rxkIV3f2IN%Az`4DIh z_J_2c2To##8`*@s1LIrFcgQ%0Cxw9)|@HK^uz9zH)lJu5_^a9<$gs1<6VSAH@K2mklWbo-Xr0Z?% z3+aly_}J_@?m3RKPL%0#fuXfsy`5>nb=_UQZg00I6j4}uj#Mo+Q34UXuj#RP*n4Y56Q%jQ-9;ec#F7(|)Eep!wv>jfC8z>AL}D|{EQ1h2 ziD;pam7dn&$tv2*hK#HNDIVaU4}gbk&Ap!Ey`GQ_Xe$jRm8{&*bwb!^YA<)%y94(AutwmlRlBr?P1AV6?=*jf88#QSI{B3wUv#~DFgePFY%m*YlhuGqi zII9!%;4Hxjoz%LaE0pAG^0bB$Kz{K)kX)0e_q)l-9B0UO*3{HQ7;xYl51w!si$m8H zr)7h@U8tk8xHRN0Il;+K2ENHpDLJ7bKegn9mi)Am6FTzKOHSy?&nP)zAV0I@gpvHL zk`pHKi%WnF8UG=DFEH)%_(S|+>MfvE0LdTu0U0{mTij^_wxlo*{5>K^&yc)U5`Lxbu6W~7cLQC(N>y#K< zTNeB3PE>Ix)wKrrKn3Pa(~Hg$tSb#cvA?^qG*tI#Q9;;!h+dB-Vy;||-_u4c0jOPZ z{CJzM4J@b&10T39Mx;8JU3LJ&3&FcwAvdCF!A^5Uv=ecxDP(Cb9yspzc|48Bk!Qoq zXdb_uUr6t3Qe%5Uy~I-jYr7uPdUT$&$FzC6)b1wYv}Q1Eln)4e%X&k4wD!#Iik`)b zmL4q8EG+Gf58vH7pG+GvjrBTI&2N3A;6kPZkfu==~Pa!jyiN~|d*G7d> zD=t>QiOR^B4zUegeorF?bb?-zUn0{;Q-t)ncv^u^k-3Z9h;u2g7ed;iwZQpHLrc`s z1ooBY>ZJ+J>?*5G9033D6Bd_++|6B`X&6&IjotneW!!;&yL86ab!nY5zBbMHs-}(2 zZ+432lF-89*a&JOwZN8=P($%?RETKoIC}bpU_*oYLuCN5RaBYCI8VlW5EHZ}x#~pD zKJ+v{dY)XNWhE!f7;A|71?B&JB;7V2f&EXYnD9+}tkGAO=8d5jZ?^(x8yN7yVihaP z@zhZ4r8XnHD7A^8U;^5YAXamKez($HgflU;L z0-H-712=3&s0$&C?Fe<3JO)uF3T;71es>YYJ*4h04%4h42l9108yxLObz5^zyySinW0odPZj?GkWNXhgt8 zq1^&53XKZ5C^QBfH%1z8k9Z0N@L{hUT#Dd0A)TUyn(#D%u@*s!Gbslt&OQ-`K2V(f zNW3VL>m}kzq&Of4DaEC7kfI+%K8xfmFO!25;c_`h5e^~l;z)j1h$oTXm2!}xAC`j@ z{c}jUSkCW=9HauxFz83#g|oX!L4fG&g!SDsw!!647i; zPqIMdAHDk?gce!Li5z}xEfx&Ny6CyT=gnuC>|A`*i2K|HBTHAX z0_Mei1ov4BR;-$R&8{bR;(mm&Pd>MFMN9j%c`xi|>{B7MZ`pSK*q*VA{Mooa$e3ft zwu|{i?#Va#>Ot_!w} z-4|N?EYtF{7_%GakL}-M`%Kmvrj_IaKD2x6{IhO!&-gX&F`n_(o(n#)@7TS+p3k(( z5Mu>L_v}4uPw2ny27N=IkG%xRFydbPakhgbB|dZGQ(x?J#@wt^yM}3V=MSx9zFlMc zcC%v07k{TuqX)yk_#J0DvuzvOwT~6jD{sH#ti2a#gW?rZW!K;z;Qi6l7>o4eU=0*EpQF^#=;(DUbgXskaO`(nwTshw~!m70x5hJDm?XpK`wBeAD@!i@AKRVppT9*R{~K)^)aPpX&>y-gdq3wz+fN74BB|fP2`z!M)wR%e~Kisrw4|xci9vHus(Gd)*JX 
zA8|kFe%Ae>`xW<_?swh)aG&(pJYG+(r`S{JY4mh@`aN?!L!OnM4W8|uU7mfOOFdV3 z#yv+ow|VaL-0OM3^N8n3&$FHvJ+FA)^t|i&hv%f%=Jk4Wy~W;2Z=<)<+wYz09rCX9 zZt!mR?(**QUh2KVJMKN=z0G^4_g?P<-bcJodY|>a=zYceruSX%KfEV1Y#H8++>GLk z%8bU0&W!$yxfw$lD>F7^Y|q%0u`lD&j4Lw6Gmd23mT_msy%`T=Jd*Kb#k{+snyh=Xp35>ZXPxwy`Xl~LvdzES zf4%>H|MTL`?td-Yo!yzeF8iRo&z{JBIQvBQ$(%xYpHrK&Jm=D!J93_uy>nj8Ww{l( z5qXz8lDjweNbda-Huv$|ck=>y{drr(y^*&+?~c4@^WF~>2WCt7!0N#Mz%7A?C4AuN zz*(x z7rZR)je@@vh6&( zu=tMRCyU=O@7^oPE$J#*Dd9`DmmDs+r{t$4@0R*X8^ve#(yr1irPq`mFMY|pf43}9 z7AZTsY+T-#-BI>z+54g5kmT41tqolhdL;Btd4BnP2~obae7yXj@>eP}d0&xV(Oa>h z;y}f1=KZmXXDj{^&I&in`*45w?C|yBhr%XT;rA+wE9Y14s=QIQSKd?k)5>?Nd{vS{ zXH{?2hN=TqN2?yMdb8T7u9Q&Kjn%8G4_4n@{j7QaMvbkex@KX`+2Y<-v#(~N=HVL2 zm96HjT4!y2?LyIJ)UK_)r1rMjM{8fJjmqA2?z+ypO?8*n-60|B9QzoGt+ zgs7jWf2{tUhOCAzdEYS9a8bk2hNl|dmG_NWV`by~#vP4^#l5ZZNaI6|FE_p~Tbqog z)~0n$2b=CT?;mM;t2wK=t9gUCcQo&9KGOV9^UKZuYAJ5%l`t)HTh4B|y5(4lv{ZY` zORcQ6zICMaBH7+L-uh7MYi&lGl#{(}cH8#0!)^DpJtf+0Z7;UH-=5zt>9w`bZ{N{= zP5V8vZ~MdTC)!VTly>w>*p9)DT^-kU+$&o<9_e_iGpnU^R{^(o!&Zq-SmUgrF?ADADRAEud}yWw)S@QuI;^~_qN{enfFikzSHOJ zYwjBo_eS5kzDxUV?|ZE8jeaCrc3yJtK-; zd|~E4BH8jj5{e8)_C;=qJSL$dFGW6>RXD3h-pyJ#>)cu6vyPc}-<$Qytmy1w*>iT| z?A5am&c1v0v*!I9bG&o9=4_Z_THl*<;?w zyi4ZYIq#?Q{wD9|8}nP|ubY2xzLcna{v-3>T9CD%Yk{fL1$!6Vw&1A+?=CcHUs$_v z`NB&V-my?xn{DB%i`b%yMbarD-{6tK zM+e_t>{~49u`M24yle5bi%pAK{K(?BhO&mbhD_{3dxvftdTQw1B_{SIwM&*Sxpc`L zOC)ytl2?bF!|lUshoz)#!$*c68h&|L+MjKyv9xyS;L;ru^V0oG?^ycm()UL4<^4$W z$l8%ZBlnCvCutpdbD3{h&$6w{u3mPZgjn|YvNx9-%WIdx&XtdwcQ35`$Ev_8QzNV9 zuG+oo=2efbdQ-Np`q%2>)dQ=g1Z=B!t-f~k@zs(>`|4NMIM=kVS-ZyMcg>MC53PB5 zjmhs?V{Pr)!L=s8Yxl3cW9_qR-y4~-Vo?w76Wo?iFPdhdGE(%1K{-?09``rBmN`g_+uxBhP%eDZEX>4wOLts4%Sch_&Y zf5VF#-jjD5wT+b<=WpCD?(7@)ZJgNn@WvAxKaj1Pj7_zh1~*A+ZJYLQx@FTNn@*T* z?`-yNZr(g3dv4ya`S9lBn_t>|Qnqf%-qN{c^_G2N)x)=3VP3B`ujA(Rdh>e3yxweH zZ!@p=o7cz8>vQJyW%K%qd40{izGGhhVqQzH9)-R9M2UbD?>$h=mX*IM)1WnO#D z>wtNkZC*#r>n8I`D?O~j#x60h2h8gs^Lp64n*5I4ZQdU@uMe2lN6qWg=2fmJ`IyP) 
zn91kZKg{+I%&W=oR?t9nY&H4YT5R4|o7Z~t8ZocK=5@Jw-C$nNHm`fktI5|^ldr8N zUt3MSwq9exnfz@v^||$!`T2e3^npdxREj6#5=GCNkyD9hWrrfvhGuy8*uSdH zW?p^fwa2{9N0or19>7rt;8_0?9LMV1`l^dhQDXtjWDp$n1dcj_#Pd1o&p$Lrk^N;m zStHH)I1a)u+ceZ3D$`OR;ng@g@SF)!Lv5oFBZo|iK2JC$MHad^ptuP~lwBK#ipfA? z%?0;`7%c`BYge?Xv2;LHqf(Mil8Jm1L@sJHH15aip$iREpkJ-Y*L87{j z5Q+R-@vHuE?_#pg0gkYpy}P~ggn)$08PRIFV=rOqNX0znK38714rg0Ry|wkrPf3_Tj{j+olMSgER)Mw!6=H8<$P0If~Ni@=0rK)9EXBr)L4YoDrW&@ z$~g`-wVWxNnM{iF1tyvASfyyCM)i+dtCfoMxsn=}_)ylK=&dD$vWH}E@g<^b(%B+f zv4^5YPd8hv6sY&g8WLxVYNgTCdy0jLxza6hicGpB=ZRXpH4bGND_M&ID%LLX#w9SB zEyih^+!hs#8uQ~pl7^D`rCdMJJEY_H%W8g|FzYdWh{ zZ3;qZp7>qUm=_cK9=ZGnIC21MEB7+R1)`A zDGI1qyP_?1?$^<1*qfx+;k0vKwJ8WC3F0%68r45O_azl-gmhZ8;;)+mzk*ArXVpd& zrsgtVCEmriip=GBqQvR z%U9(~lGlT>Ma(9E*V&#lCfGJidKs(rsk4HpNxl0!h_Q$aeb%a zSN-EY&5B?4P0pLGQeHF#{zU>mK`6hNieL3lk6-qk3csA`$q>>i6`h1X8A4{Xafr!e z_;Deu8NTH59^&5O+vw9cm;X+!F^i5%$Qn5m?nFpA&g3&v^-9gNsD_&8ZLEG$WeDGh zLs{q}tkxMRpknQk!ZULrr5?ArbX+JH8j~QC{fdP?Q@YYh6a9(>%3R|ptdnt}@Sd3q zXLp_y7lrj#LO)|FS|@XU&7u)1uczIXsjyy6kK9pK9bp zI^;Ns_Ld$p+1~ymPLYMKWdHea9LmByVYNm`0TpYXTt-&PrqYCz_L(74{il^))nCez zRt8o&sWnxLRti<_oX%Dz%e}4mRsZz()92n+{AzC_9sX^k7qKs>d=u%!YpCJ zYE2wn#x^N`>A$Cnfl9)Yk8x%QnIlUYB@f>v-b@};|2Pk#)r^7h*2EcLjni>kQj)UH zDl4Soo$SI&&-aq(NuDFt_V^gdb3{OybjP6-rpaiEQ%~ci&xI8XS;W*5u|lScq~DKo zYLS03zmLSBrr6&QP%%9z+RntUj5kEvs;BYUN0H5U3zE@-@|bdiYF%Eb=^Rf%|MtmL^Z>sa!YEGZR2 zT2LHC%HjwUV~^rh`IaRy z{V4oDq*k+zpHbB*$TaQ%b}lmt;LZW}#b+n40$qD3ks; zR5~tHYno1`h7#aSUF7OTYzZcLdd4-=1OR$kLnC`VVXljHxm5UCVCLGrd}P1>@W z6HijB#m}F~niA#8Yu73w=br)%OXgSi92VnYfVo-PChJ z!Kg8lx%f$(zA3n{K;bDuHC8egPn#{&9*0V`cq!khAY@w{BGuwmo1#U@iXa(#hA6S{ zL!ZX&S+q_`p&VVls`-#O<5U~rPqkKo!Njlnr^he*CdW7{{`C6~5<+E$@hFhWzv`bJ zzszi>!mm~c>G)T4rknAqExm?*YSECw5U22G)M{!d{ThVM=yE*IDqqv#S8WPHS%8#~ zz+h@4!WYmKp1{?vO(N3uUrt%*D(&qRS!Qe@I4InMalb?c0orzY;)ZTkKJr4_coEai@CRpiJ{o{JJ(n+mnlCz{EKBE#>RFWG7LtGPN zvYagxnR1j=C(8Lbi=5Snsd5%jrkvwYXW~NAIho(K>O@7NR4y#^nOx9#$N^;J~`l$4V9(5Eol0S>CQuh#2 
zpe?H<#YcP(!2%ttw<~KQ9uxNvq;#wl%lB5RN!(va2WuTa**~O%Pu2J@Nct(VNY=Oo z$|8egjSHw0-ql!V;zm_u(|fyAZm3$BJPR#!nwph0&0KI9_jVR2lUlV`ZnfW3ZmeUS ziJSNwBPpF(H#E=A%uTx4n97av0aJ4$$C6&t8iA~m&Lry){*Jgv^x=tcaW6}dj6F|1 zEwf8XL1osW)nZAhF~mB$(oQ=3iB>Z?gq&BB+f)h_hIkCIQYiJFoXuD%R1!~zUs?j; zpG<~vTTwnfl|qFfJ%v)2Q&E_Hzst%)`kJRakFVe+=RrbHWi^q9Us~)y(Gch11!|3X z4C;RIT<1mzrbMe1&Ma}D}i5)5U1TbA5^QPz)DB*tWFP^JgZ+MoCzLR z+E*?Nui*@)U(16w=*k+=`(*T|7xzKqj0jhYQ?Ymr^lauu3GWe zrS%mOqE5(~AQJ7|ieL3lk6-ppo@ZOdxau{X{8bxS$kr+GQ)}Wp`%Tj5sTN`#UCA#| z{t_ZCf5I?1M5@0RG?*5mFvKNfrBG@}wpl%al}7bmahkYhP_0r{N@B^L;AN6Zf+vu2 zP4oo6jYC21YAoX38s7y}tX)B+%30YDK`1FCN>M_@<(z0|R%%rLxD+K7YJ_zD+=@TF zKbH{7mnPe}6~F4A9>4Ue$#%Y4NKn0|vvbu(5|aB4a(wA$toY-*94fClkuoO|&8lR1#LJ75z@opNKcE0V@}(MnqIeUBxRx ziCjoX1w-%znaqX2oWO;wAtiEQfl9(^<>D2=MIzoab0N8&nhOOp85h4N%xC1n0%cZb z6;>-3uUfcJc!>)sk;L_fRT~mkzN)Nqva@%Iu*4PhNY;&@B!MdlMbC^|pFvKAi&K%h zUba9d)TkBBiRZ>9|m15X{uNkdO*SjhU>A zH{$e3jR>nv)`bNs39FTh|1|kar0q;xC_2+e|5Sd-BPVkqFeg}4I=^j!GIgP%#bjK> zY5RCw#G`29ys&Z+kH%-_LdM-hzn#j3!a6k<>0@{zW$9wLgosnEBtbUJ*qd?tK@?Fs zR54t%nzBxh-|B6zHTx&#P!i%=VOwX0ARjR~g{pr%%aIf+9Mm^C%Tbugs;Pe}gzBvz zNHU2$B#$vQL@E#eg%M4UDQKYnaUR~HR{5~HtxEb6@urirvO%I=MfpTIO9;hlvL*bHp7C$TDYDR&?0x?`4i!83C1;vdiUKM|zrvV~3u}~)%Szf*qTiB` z3Pz2Y%*8u#`ljIGPjRRzxDZfgj8S;wdbH|7wGvfQ+VPrLA{P=;!4Nz_CUYS$Cs?$s z;w4(N1u6-vRTl!v&i6;i(XEe2M?h)Zdhr z`fh?ae$fi5C5|}lR$E*tTIH*hl=5@QejyQt#w5sOTeMJQ(v{A;TA-%L$|U`Xc+bp* zwA!h;P%x8m@t3$O#(T`Kscs%1hGu1qDN6#`A+S zL8jB1g%gusnJ?sGr>0)Ah48k-ygtRm3 z?D$&(y-JcQ!xOEh-XyoOuT>)ox0JBb9nq0IJFJkC=N=Y{rm(BOC(v!hYL&Bqis?Z? 
zrRI2vl%?}R5+Y8uvQ6n@tkfv`j{6uZ6;cDq9?Xhgty&VbC?TZxk*go87OnVI|Md7} z->LAcJU<erJOLjcC!H1*jG`x5ivp*V$P`+%K$+hcqp(`F zDCw6PPyso>7YyDD%6O6!lguO8TV+6y7sw zQGKIVIxWgEHwJT9oulEh@Zc(xO_sq|>4tL#~djTKv$W zMb$qpMbT<%QT9#Nq8eApBvp%wdIgcFMJs+KhdBO!2`#D-((%7cK*-k>W<*HjU$qf_ zf=I+KFqpnb^-spHMmQ7xRa4+!CFmrG$?>cH>G8|H$?~`I@0bF=L*OTf$?>cH>G8|H zQ{f+;0{^hUPY{#iSN+rDmwl(gulA7B>3_Are`biOC8YYNr;u7xa~`XP)U#sgC|n{a zv_3CJbBQslg{uDP@yotbNodg&8d@aq6U5{iQvK88mwl(guQK0s{7<%WvGT9_r^j#2 zx~(~vRRhuv;+CH1=Ov_qA&)_j$$tJLl2n49msXRD&kOM{gnx`#pv>nv6;`XC7f|N+ zK`XrJtY{nI#FN_ex-6|0RhxoH)Z$6%Y5FV3Ysjtb{3HIM_>c5x$0y5B#aRVh8h&R@=lCrxL6wduL!Zk}Kjcw< z8t}UK^)rHXg&Ob0=h|H*u>`1(+5 z!6!PtCvP6k1_HwTj%PZlWANt%w0LjUy6Pm&p!a-jr`dBZWEC6Fp?qE(u7 z$xonJ&54mH7a`3q!VHj7bK_H&DI1a+Q86s6fv|hDz1lu8lC(6EsI)<^cA*CA#CY)* z&}H(k>}JBGVlMg-)equT?im@H*L<%y=)}DS`%;v3Xl+_M`hOaq+c9e;Wm`@D4w@qm ziPM4xZxf?YD|x;TwSKKnn;~GR?_6-T8j$AP)dob1E_{$_A?P#H{FG*|l(L$cf<|q& ziBno6$#h=Ks?_|1x76mFaG7!xtw+$7DJeI5XppsO*V;l0yrOVWqpU#ALUZ1O!o~Qv z7N4tme=Xh{{AXhPYbAHuPy$W#DQH4J%5q4)!slqZ=tJI2w3-yCR`Lq8(`(LA8d0MN zZ4>>a6-YU+5N*rRW`{HvnzK&Y)+%i!wTiLHwx~UBL90d2HDXlqA}dh~PNZhB_=M!V z4)}KgQp$tiXvVMC&eP7-&X0{xS{@U9sC5JS?6zPht!+RhO-NYVgw~Dbh>OLiWbd0} z^pb?(eI3I$)NGPst)bCC1k9 z$u{IiHf>Z7L0>lQ8dwu!i{+JXVoiJ=?qh!kndZO^;y8@4c#DdCi2}#*%(x)`lus{8wf@ll;!?D%kKg*wJcs zqxp-oYsGKQ`Vc)`_~qFQXr~o2aKaakn!iH3&hk67^qaC-uu=M!(|g#zvv07M*+cC6 z?1$`&>@ju=`w@Gb-NBw@Pp}`er`Xf%CwN}_r}*aoXW7r#&)F~7bL<87OZF@FBL6o& zhkue^#6Q6f@*R9PU(46<3-Rj-U*fOuUF;Pt$Tn*st(<>~@6@ujJS|tt((?I!ezE4~ z=d<7Qb$pcX(E@z0R-onJw`dRXb3oPC*xl?wel9-`ziYc*{4Op1KJ7-W5Wf}t2)_cq z(Td-!)q1q){2Ki3?KSLk{AzX=yO-U`zR8ZUZ?WU-+w4B}9rggmx}SZQeGlK+bP2l^ zzp3+ib{&4-;JHo!gZbQ4xK7iL9ECPAa@5Q(wy#wrC{uzFwR-|=nKhd7iey=~G zKW6jVmfAMhHrs9}y)u*=Dh!o|Dniwv)=+O~ZfH+vU+9X^z2!OOf%1y->hk6at-`3t ztjMm&t0=4}t!SuNSh1zztg4?@Klzb{Un{0R!W3d(W%mJ7J}^B6OfL&e+~%__vu(0n zUwSZ<6UxVLP=*Ai=_aOwX)yVLsUU`FyTHVOiL>>mX@21E^lSK6gMX_Ti>`>$uOO%V zy*}%8%*NLvuV=iDIr4{Jz3zH_=W9z}`_pTGel6>@jMp%)UUTB}5C8DzKm76!mooOs 
z$nP$~AZi|M<)s|Yp=vMU-^=nJUSHMzp(}bn>$m8)=|}ZD39)>|^(fsX{^=9= z9RK8>emz}vve^HtXB@1ZX3IYQDfU^+nJ;0s>}QwrJ$xo6Q7&IsO98 za`tQXJIuV-*z4>K_MbTO_h0NS_DA+M`)~FR`xDM@e3|{3{e}IOkFme9zp=lw_t^ii zf50oh&;FNvz&=C`<0E#Gonlcw#J9532cw=EwQ>_;>lk{1N^i{QLY4{yY9We}+HJzs7&TPw*%C&-qXJPx#OHGM^Jn?j`Ahs|{!9J>f06%||BC;b|AzmVKf(XO-{ODg5Auij>--J=H~ts? zKm2X}4u6;bCx4SS@n+t_TX`FA7rvJK>@~vUeh$9<8vI5$JS}|TLH=2Oh+nSdu`ODu z79gKa{#VS$a?J2<#9bZH#>Ltq%`QXJbUs9l8q?1~;>R`oIK0L;+eqh2@e}k8yAANV z-);mOI&#YOs`8wga=m;nzjJT&7;hZ)L|+`$ozR8e8jm$j?44*m)~Y=peV6B+ zYUQm_N*6Ge2V+-it$4D0>Eh%0BWojtgn)6|L1weDF~TxqiJc21MUm22BYg}AZ4B(q zQvjn8S57#u!k=ZhiW_n|932C^qrFsffR3)}aD{<4Td&P@XRhe49jNg+dM3IYwY9Zf zj;;x$MKhSBMyO`GZB=aPT2XXbWh;ipp`2 zIUI^m3(d?h^f9LEbCx){-9FPkToh@t^e_ph4x51p%p}8)MzR^Jt}H77?!xT43cue4 z$vWa}WpbY$uBe_Cra72n1UlNgW^&1==fdkU-B~O88)nsIIs=Zn{>2T`=eR&eVXszS z-`MNObu>0K^c#0+KLg)oJtM7GwJqxn^|m^DLU-}=!;Kvcm({nn)sYN1yY%$s+I)Qp zGA83Y88+VDz>PrA(adM^0lnMd;||`KH8@gq^HqiA8FS9QdRgJy|_V%pl2|8Y=ZELB&oL1BA+2098rOo8seyg{b4__!aIR}K}}$C%wd7mFpXZH8sdSfaxKZJQn3X6v&-g=I)q z49#w!B$O6nG~#D0kdsO2po?aM6MS@ZcjSb-%k>V2KEV5UdjK+P;1$&cfhQV|@kr*G@og+a$FCkf&o_>w5nZ z($Z#7)W?U579Vc|FY(sigw{wddVqahqjXPo8Xb*zaht(1a>~PYS3v{UjX)oFz`D9Y zIlpzaEx+)tyaz^C`69;x+KE$d7M5If)+qntRYIrv(CM(CEo{~4Y{DGVX^mB<-R67@ z59o4&k|jf@4>?*|T4p+$Rzwiv2hJ3Z)Zj=%j)Lq|sg57TSMioUY)zUA%F-*>L) z4VCw;XeXvjpK^i~u|rZ;WsGY!jeH7YkXFoXDDPzo%?bvwrGi2IYM@vnhSWE!fgng9 zJ}6>Ea6?tf6*_!+TdFJa?3*ajTu!?u)g5Uk9>}aH&QA46bozs7XF=9pcFBXVN>1L0 zj90i#^2?G%GF?TK$(MVsUQ^Yr zl?0->L0**=@?~XenOPa7zUZj{zwx@C+;E91;)h|5i^FVd66 zVf{nQjvTu?VAFNms3~EazO*h_UQ=EZa=1zmLlHhR;qHYqFv#~R zX$|Nzc{BcXRX6ajuBw|==W~}k!=cLpmxWq1pRKZcQQfLwcS#ilTv$J?gqO62GK>m) zNl~{xT-Y|Z{)SNXcy;Kex;ZTc`e9jy&1Q2M>nY>8>n>jS-qG6$=v;5T0%@ zoqnZSw;B35h!-MM2#28@YI!C8tIj16iVV4w!v{2Qh%=y*z#K3*xIp++^d0w7x@OZ4U^=rgsGPwg>$(VWbL_*|Gt1A-@n^c7hMk3Z z*n!ExDO(DgxssR*-5sWD&Kc18ClFzS&Iw0LbISmnbaQLVfFYdp^qw`!Noyy_4~rD) z)%w=<+K!ddGxLg0cx%$Dkm+K`lwxmFq+W&6-X%8BsBbp2J+~XRuAn=R;c+6pEoRZAJun87mjPsmk+F- 
z_R+L8Gb%_{r65A)vvrYb3WhcfdgNSWVnUDh%}{HfeV8J+nN5~EV*g#wf07&hMFIEa zMc*spZF$k@+Vh8QJ9OV6j7|P|gWxS338ftyd4w3%j%+T+98(V;<}U@JkLL5*K=efO zg!cUS&EwJ6FdoI$#oDh~jj%o4hRKSx0gMoN2xhKKfI+j}jzu6xQD``)sv@_pDofa2 zIm}K3R%lNI+63Ls#OkYs8zs&*2n%vE!|e2FE2kIh6IxqC%S^)z#jZX4`tKs#hC+QS zI(S9hwRgh1xHr7ho6-9^hcyYV2e+X6ndUPPWhM zFtEzW<9TE<-R)hJ!<54h2}yJqfxk8nIH&(Dzz;8VwGNK;b+7KpHzw>YpXzGj=Z!~Q zU90-a%6gY|)X`oQt$hxFCofs547m6-{R${0L}CSm1&~+*H4WYMwdb~eZE%FvKyUHF z=vSlH@y001H3(C{!GOa~*^>?@4t?VDEnpFz4R7tt?)Td$DCjVl96oIWuL(rw?1kHU z1jY^6)9+)9J$TzeYqZ9oT}R1v2(ulf4Ty)PmWDl%;g0r-a(_t{<#;(ALAa${M@Lup z08)8ctWmg;ep0x~FvT7CPAoBU^N{B2;T`wYP&{^E(dV>p`_A6W=i6O&!|p_McH3iRfEQ{}2fH0|9fTms85~z)9*$wr|}Heh%TF=xVDiR`m~gTR7L&x%>=ZXX(M{336WLR(Kl zo4%}L!S=%FFL{3SZT>k^bg-jr_{~My?^pxdDHG%{Qc7;O?L}W4GO27GAOmpSBHcbMM|< zRoze-YIgbq7p`p{>MA{VQ+ZW=sJPCV9r(m(>(c2V$ezCSnqh~Ca;5S}sf|orMKdw) z0V5M5G4s32;cvq^O9EPU;L!~e8??FOgRsD$uee~X;R3h`3D!ApEWWu;;e0`?{?yi{|oN@JNUn%j^?(uX8vK+*s-7J zMUhG)*OkSJBLxl(C{UsV4uKAi%3qwN3n!Hm$fc)-ML&OkR=(5aaJZbotnYv%$aX1c`Zv&&Lw1%8 zjqxE}iZ&Llau9<{C|c!!raY{&%m9OWLbv|b96NO2fG^H)-*|iU&Dx3DtF`A(`$HL- z8TdcsKOL?A*T48ngp2Zt*}&ytRgnrE+%J}2YOgAB6P?Y4sqtCcZ|X!Dl1D3;pbfMkJ5E`d9y>cAZi+n9i-Y~I?n z8MwOPxSX05mslbbLeLC+lXZ{a49S!|3E38eygo68F@|OL9AkJC<$>(7((206hO&l= z?DDYRU**QKM=r6&3OiOx3X?pWwP37HNh`}xz**fp*sw>-$h={uo}FEoy}GN{k(oKv zRX?lN_h0p$MxItxRo8(`rTm4f+D3XxcW#dEf$!^SYVQe6UtZf%e|=?bYyD*n9i)>C z=%fQNvl#oczm%RI+t8Xrm8FE+5=^_b5K84WF-NUa6MIFKdQfhET^lKYTD8xS!& z0a%5YI->X@SD7MAlp#Fk<*34ojE<`CQmO^R%`gSj6J+4%@bOdAQRZ_5QM}#O+qSGP z#3!`Qnznu;XtcJ}_h(F$-Q+IoS=##f_K}{FlFq@VvYN)a&sR5BRct<2%*7z|PIcu{ ztds^KeK_4fOJ7il8iyUJ8kLgZp+F{b;xQDMWHhS^hsr37XZw*fmGV-m5K+=3&6jE) zB5fSS(Sc8Rbd2^396gZg)1fS9fK6A+u}Ys?9HO+_goVZWw6WGWI3@y1}UMeH)OR*AI3fM3nb({@tDvHc&pO;cviVScUjO0f565;v9hiqc z@KulcdM3Q%ddXP`b3uA86xiW$$%UhOr|V*wi2N1%J5=1UbDMo|Lo3X^@G%(r320?`a zz$i185R^7X!5W@d0f{{V3|vu>Q-Q_0-|vR}u~v}VRfyp-_|T@6+_N&T?lpTd+^*@^ zztMDDIvU+`K0VV_o1JT)Mc14h`z)<`RbxkcQ)9>A?C1u5ZFev)dJbKJQL6TZpqQ25 zDVJTl__!OMs~jK7y*A_kHhnL-MG==!JeI3<(C7R`dU7ty 
z4d;M)bGy)FJ`?&h%TJ*-*s9A~6Is7J%W>3jG&a#j9ro!ux-Qt(6mDwoXq0<&GS{a) zx>`1|_;?}sN}uav?o%eKa{a}}ive00Ulv&N{Uq2)*9D1iU=0g{l0uZ6b6m7AsO2^0 z!eClQDdrWe4CKOqZUT-lN`Mt`FmhqQ!>^8=H=$RTp?qd{E?qRX)|2s>bKAF`txeeb z+XtPtl8WK28$G@&E_5#&4Ca>>b-J9M)eD!cUb?Um`Jc_@S-)`A8ezYv12CL|q&$sk zT$IKe2y${AEp|>-iHlqSYFx6~i}VST9t6qF9~6id-cU4A^z*!*S7HZfJnvTQ09D?g zRUbQc)$3vl=yl4a`hn|goKy8vPDsAX%n5-uSEa)z*$;#^nvW^tQ@~TC#yP}51l%J!GVYD_Y-(4^ZHyQP-b3F+jM8j>vIiD zUY`@N-l1U7N$U%dD?9X1=5`OZwQU|`zCPQ!OC6{caPBeeNc30lI(oC08c{U0yKI|4 z>J!N7i%Uw2f^9xN!@MggrW}99sS|v$w;_;UTwIXX;Em$^>*@al59`508P$3UvYfG; zo_G*BeQ|biET<1llF}n`hF|#H9er3haVO%O%dzpBuO2x%AN4@-fy1-wTM)GKlbIfe z9qSLD^OMo{qi0n%dfKqxp*~91XKqwLcvAT78#Y&-IpEvU22o zZ2kX%s>8he)TySy&XPmbq1@HmcN0BSKWGCzc_bT()8@`N&INRT<2s=OraeInzaH3zm>kQr5Xf1I7rclFZT z++01wR+5{WXPa_g^nqYeQE-G_z&F*X^WhtES#6}s=fJ8GWhHVEq;q`>?J3J69q`fq zQmpe)%R-qpHbpW;xeyt7-jDj6*gV1-*T~7ZHTeCzHr|Z7?Zu7RURw0}oa=A+#Hsra zK^7!$hI-5ioXrGII-Y_v$Oj=R+ZaNLkeAv!sm!kNZ=~WlHk(Kq4j6kE>SsdLVnOoyq0saL*7iBN6PY5=Keh|Y83GD&MpcXEwivg(LS`jw`)~*ke{xM3=EVn z@@CtyZig9Mb?dX0^QxOV+iIzz9h$yO(3%JRQXRXQ^+viYkc}{oEdo?iZ5)MP%n;f( zI{>@fJww!!9X zr?X>d`}FSBy(qnR*0;{aefzq+t9=27%W%503jdPJe~|a}T#ubUYOCxWY3*FqUtZL) zuqjkMy|s1vRXaTyAP;U(zwD}gKD$mb4?^a>V*Ol=kci9*MVy16lMV;%0QrJ)%qrT} zg_f}$A=YcisJeN_qsF88M`j<%i#`*4+MA+dhku8&YMd28K2+zcW=kT2+j~z@7o%ta{+~|^XiMryOA?=m-6cmJ(K76K{CiusYX~2KFUxfqsR>byZO{H`in2i^K+tPdD=y;U7QfUpGDI@{G(s`t_exRJ%O7 zk)5C08*ZB4hPG?(3$2@m<=KAjWpIiB(zt=W-Gi<@^%mNr>`?NI! 
z6@TjJ>u>Mq>x1k^K=|39Ifo%5^&)}ECw=N{FU?w2jTXMLD1!P4HE9P^+T?kU9F_^y zi;5r?kZ`GCWu3@7!*Bbqs^NdlkKTO;?{|S*r_bjtbZc|ZeShNAQ%LahT^<+g4gN3_ za$JtN?SaKjXY(Sn+d>c}j6z4Q#dWsRfyibXbE3w9nT|kBhitF|S&*OMCE4arFPNU= z_Xab94meI&7X*F6e zbi7Eegz~TnW;=|Cf#L~{6z;$+bIs7829F96T|J|3PxnI@?POcR2 z?C|Lu`5Q5Kp#nNeB9v*z<|=>V<|l93t=~(2@5`ut{s-1eG+$+=rI}?hAQ}T6_~^>Y zN_hrPE=EOCg8ji*RrD_BtfrI2@!a^JYs+!v!Rozt%J7pSQ!?7A)X$IGFqbq{^ zPr>N#u(^=Mhv;OD!`O4hQ`i0E`t$6_x-p7|HP%&Hot6QD9jqc!hEL!DaFQT-M#0?8 zq!o`TILZh4!@=mn0se3xx{&9L^TXrl`L)v@@$bT~XTYB7q0K+fg@jd8_xblI``=?DqQly8^e;X@wY9d~eg>wBnK9 zRF#)kjiX9P3UK4p_mAP5?QE8;K6ywhw4msZ9g>WF)Ju*Fy8wW&c*4=q2eQTd;T`0t zdE1fV>We+viKFB2h@}4sexEj7;P9{w5=$mk_z4j?y*Q?Xj|H|o01!HYDADV@}5yB29AjB?Dw z4WrK(ZfPzLjG%q389^p6hnG*3X_j$K`Ub`yzd4w%PLf656zBqtVAk z`R5Ds`tu6K%byqY_4h3hFW*f2DcpR`=^M0}@Z;vWJ=#IXK}p(-hPxE?+?}vIoS8&< zm<(kU$u*g^!K~-@?37E$vw8AtMAUo=6Nxh&3+`oxao%*4t`rKz5Z2>t4BY65sXJ;U*}2oBMv9w0Jl7 z{c3TpwtqIwc5K#^OzgWM)0Me;dc&;x%)iw1xCxDdRZs61-{5r8- z=*OYvV#Fdps}=LH1cKN2VN4HA#8?d#J)Eq?!3#07X zW=->AOU&4RBaTp9@_SFv?Q!G(py&4p3V6zPK0mDu;~m8j>`&s1o>`1Dor%H8n~dV3 z>4+hkzAQJ}uoWRr8|Mxc54;mXYDy_eEs;@7oLR^3K#Dm+rNL;{)@*h zz8HP!H$9owh&@aX^5+>5I%$bQrA^14lW`ELc2Yf>R$zwV*i8HTeU4$TmoXH?GJP57 zC7;W3Q-MbYauL9FtoXa+f=j&q*w0_`^ENEJ@PG6d7hHhnRZpw6qMyDyfTh*HUo*lD zMCa+hV-6mG-}4{~KlES$(i`=xR1MQzxI-xb#~{SXDq1Cry+|yQDWrq=nTD8wXxE&3 zEgcx0#7Yl88Yw`^KfSB7p{};NybR~bd{{uBgy5#ouR=Sfn_NMdaX&~UgV-Tw5g64m zxd2jVen(NI%je7~nU(4Dhx6t)&27!{`G@8B*z+1#_|E!&yZleu{9{G9pocSM5T z9&hjHC+1xCsq7^e@LTFzM;e>gjja1?NB4>iBO6w&*td%_w@lzSEp}wpbp#sH6LpPuBs6%YTS*E=-gD6x$ za5#~HvkP=2!-1$_zK0)^D|9+&fDR(qoT*2I4n3p+-V8O#(N&m$*Uzl2scEfkt;h~m z69SjV658gjfLJ43o+y?B(+GG5!UrcS0iTs9UH|iL5E>ILC9xn$o*07N;*ZzGqo@kzDg$S z+uQpM0|yB=Ez8Q-xlHp-_-sQ%I7`@6-`Hc=OL3NPB+K`SHG1Ymreo>SCl<$!6Atdk z$sQTW%sgw@a-?u%cXsi}NKqEnv5-R}SZ_RYt!OrXClC!#tq4BjS#}buujl;qZA(a1 zV_svFMnphGm8LSRx!^VUM_*%MJk@$3&epa@nw;