From 0354266700503550b1810a9294537dc0cb8f58f3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 04:25:14 +0000 Subject: [PATCH 001/124] Bump pyjwt from 2.8.0 to 2.9.0 Bumps [pyjwt](https://github.com/jpadilla/pyjwt) from 2.8.0 to 2.9.0. - [Release notes](https://github.com/jpadilla/pyjwt/releases) - [Changelog](https://github.com/jpadilla/pyjwt/blob/master/CHANGELOG.rst) - [Commits](https://github.com/jpadilla/pyjwt/compare/2.8.0...2.9.0) --- updated-dependencies: - dependency-name: pyjwt dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1bdf3577c..b13cea204 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1795,19 +1795,19 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.9.0" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] From a223a824ac90f9344824cb0c3d577b05a8aac472 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 04:25:33 +0000 Subject: [PATCH 002/124] Bump pyzmq from 26.0.3 to 26.1.0 Bumps [pyzmq](https://github.com/zeromq/pyzmq) from 26.0.3 to 26.1.0. - [Release notes](https://github.com/zeromq/pyzmq/releases) - [Commits](https://github.com/zeromq/pyzmq/compare/v26.0.3...v26.1.0) --- updated-dependencies: - dependency-name: pyzmq dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 199 +++++++++++++++++++++++++++++----------------------- 1 file changed, 110 insertions(+), 89 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1bdf3577c..68ad7ecb3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2075,99 +2075,120 @@ pyyaml = "*" [[package]] name = "pyzmq" -version = "26.0.3" +version = "26.1.0" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" files = [ - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, - {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, - {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, - {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = 
"sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, - {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, - {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, - {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, - {file = 
"pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, - {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, + {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:263cf1e36862310bf5becfbc488e18d5d698941858860c5a8c079d1511b3b18e"}, + {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d5c8b17f6e8f29138678834cf8518049e740385eb2dbf736e8f07fc6587ec682"}, + {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:75a95c2358fcfdef3374cb8baf57f1064d73246d55e41683aaffb6cfe6862917"}, + {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99de52b8fbdb2a8f5301ae5fc0f9e6b3ba30d1d5fc0421956967edcc6914242"}, + {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bcbfbab4e1895d58ab7da1b5ce9a327764f0366911ba5b95406c9104bceacb0"}, + {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77ce6a332c7e362cb59b63f5edf730e83590d0ab4e59c2aa5bd79419a42e3449"}, + {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba0a31d00e8616149a5ab440d058ec2da621e05d744914774c4dde6837e1f545"}, + {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b88641384e84a258b740801cd4dbc45c75f148ee674bec3149999adda4a8598"}, + {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2fa76ebcebe555cce90f16246edc3ad83ab65bb7b3d4ce408cf6bc67740c4f88"}, + {file = "pyzmq-26.1.0-cp310-cp310-win32.whl", hash = "sha256:fbf558551cf415586e91160d69ca6416f3fce0b86175b64e4293644a7416b81b"}, + {file = "pyzmq-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a7b8aab50e5a288c9724d260feae25eda69582be84e97c012c80e1a5e7e03fb2"}, + {file = "pyzmq-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:08f74904cb066e1178c1ec706dfdb5c6c680cd7a8ed9efebeac923d84c1f13b1"}, + {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:46d6800b45015f96b9d92ece229d92f2aef137d82906577d55fadeb9cf5fcb71"}, + {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bc2431167adc50ba42ea3e5e5f5cd70d93e18ab7b2f95e724dd8e1bd2c38120"}, + {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3bb34bebaa1b78e562931a1687ff663d298013f78f972a534f36c523311a84d"}, + {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3f6329340cef1c7ba9611bd038f2d523cea79f09f9c8f6b0553caba59ec562"}, + {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:471880c4c14e5a056a96cd224f5e71211997d40b4bf5e9fdded55dafab1f98f2"}, + {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ce6f2b66799971cbae5d6547acefa7231458289e0ad481d0be0740535da38d8b"}, + {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a1f6ea5b1d6cdbb8cfa0536f0d470f12b4b41ad83625012e575f0e3ecfe97f0"}, + {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b45e6445ac95ecb7d728604bae6538f40ccf4449b132b5428c09918523abc96d"}, + {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:94c4262626424683feea0f3c34951d39d49d354722db2745c42aa6bb50ecd93b"}, + {file = "pyzmq-26.1.0-cp311-cp311-win32.whl", hash = "sha256:a0f0ab9df66eb34d58205913f4540e2ad17a175b05d81b0b7197bc57d000e829"}, + {file = "pyzmq-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8efb782f5a6c450589dbab4cb0f66f3a9026286333fe8f3a084399149af52f29"}, + {file = "pyzmq-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f133d05aaf623519f45e16ab77526e1e70d4e1308e084c2fb4cedb1a0c764bbb"}, + {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3d3146b1c3dcc8a1539e7cc094700b2be1e605a76f7c8f0979b6d3bde5ad4072"}, + {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d9270fbf038bf34ffca4855bcda6e082e2c7f906b9eb8d9a8ce82691166060f7"}, + {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:995301f6740a421afc863a713fe62c0aaf564708d4aa057dfdf0f0f56525294b"}, + {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7eca8b89e56fb8c6c26dd3e09bd41b24789022acf1cf13358e96f1cafd8cae3"}, + {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d4feb2e83dfe9ace6374a847e98ee9d1246ebadcc0cb765482e272c34e5820"}, + {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d4fafc2eb5d83f4647331267808c7e0c5722c25a729a614dc2b90479cafa78bd"}, + {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58c33dc0e185dd97a9ac0288b3188d1be12b756eda67490e6ed6a75cf9491d79"}, + {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:68a0a1d83d33d8367ddddb3e6bb4afbb0f92bd1dac2c72cd5e5ddc86bdafd3eb"}, + {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ae7c57e22ad881af78075e0cea10a4c778e67234adc65c404391b417a4dda83"}, + {file = "pyzmq-26.1.0-cp312-cp312-win32.whl", hash = "sha256:347e84fc88cc4cb646597f6d3a7ea0998f887ee8dc31c08587e9c3fd7b5ccef3"}, + {file = "pyzmq-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:9f136a6e964830230912f75b5a116a21fe8e34128dcfd82285aa0ef07cb2c7bd"}, + {file = "pyzmq-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4b7a989c8f5a72ab1b2bbfa58105578753ae77b71ba33e7383a31ff75a504c4"}, + {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d416f2088ac8f12daacffbc2e8918ef4d6be8568e9d7155c83b7cebed49d2322"}, + {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:ecb6c88d7946166d783a635efc89f9a1ff11c33d680a20df9657b6902a1d133b"}, + {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:471312a7375571857a089342beccc1a63584315188560c7c0da7e0a23afd8a5c"}, + {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6cea102ffa16b737d11932c426f1dc14b5938cf7bc12e17269559c458ac334"}, + {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec7248673ffc7104b54e4957cee38b2f3075a13442348c8d651777bf41aa45ee"}, + {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0614aed6f87d550b5cecb03d795f4ddbb1544b78d02a4bd5eecf644ec98a39f6"}, + {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e8746ce968be22a8a1801bf4a23e565f9687088580c3ed07af5846580dd97f76"}, + {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:7688653574392d2eaeef75ddcd0b2de5b232d8730af29af56c5adf1df9ef8d6f"}, + {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8d4dac7d97f15c653a5fedcafa82626bd6cee1450ccdaf84ffed7ea14f2b07a4"}, + {file = "pyzmq-26.1.0-cp313-cp313-win32.whl", hash = "sha256:ccb42ca0a4a46232d716779421bbebbcad23c08d37c980f02cc3a6bd115ad277"}, + {file = "pyzmq-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e1e5d0a25aea8b691a00d6b54b28ac514c8cc0d8646d05f7ca6cb64b97358250"}, + {file = "pyzmq-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:fc82269d24860cfa859b676d18850cbb8e312dcd7eada09e7d5b007e2f3d9eb1"}, + {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:416ac51cabd54f587995c2b05421324700b22e98d3d0aa2cfaec985524d16f1d"}, + {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:ff832cce719edd11266ca32bc74a626b814fff236824aa1aeaad399b69fe6eae"}, + {file = 
"pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:393daac1bcf81b2a23e696b7b638eedc965e9e3d2112961a072b6cd8179ad2eb"}, + {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9869fa984c8670c8ab899a719eb7b516860a29bc26300a84d24d8c1b71eae3ec"}, + {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b3b8e36fd4c32c0825b4461372949ecd1585d326802b1321f8b6dc1d7e9318c"}, + {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3ee647d84b83509b7271457bb428cc347037f437ead4b0b6e43b5eba35fec0aa"}, + {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:45cb1a70eb00405ce3893041099655265fabcd9c4e1e50c330026e82257892c1"}, + {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:5cca7b4adb86d7470e0fc96037771981d740f0b4cb99776d5cb59cd0e6684a73"}, + {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:91d1a20bdaf3b25f3173ff44e54b1cfbc05f94c9e8133314eb2962a89e05d6e3"}, + {file = "pyzmq-26.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c0665d85535192098420428c779361b8823d3d7ec4848c6af3abb93bc5c915bf"}, + {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:96d7c1d35ee4a495df56c50c83df7af1c9688cce2e9e0edffdbf50889c167595"}, + {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b281b5ff5fcc9dcbfe941ac5c7fcd4b6c065adad12d850f95c9d6f23c2652384"}, + {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5384c527a9a004445c5074f1e20db83086c8ff1682a626676229aafd9cf9f7d1"}, + {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:754c99a9840839375ee251b38ac5964c0f369306eddb56804a073b6efdc0cd88"}, + {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9bdfcb74b469b592972ed881bad57d22e2c0acc89f5e8c146782d0d90fb9f4bf"}, + {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bd13f0231f4788db619347b971ca5f319c5b7ebee151afc7c14632068c6261d3"}, + {file = "pyzmq-26.1.0-cp37-cp37m-win32.whl", hash = "sha256:c5668dac86a869349828db5fc928ee3f58d450dce2c85607067d581f745e4fb1"}, + {file = "pyzmq-26.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad875277844cfaeca7fe299ddf8c8d8bfe271c3dc1caf14d454faa5cdbf2fa7a"}, + {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:65c6e03cc0222eaf6aad57ff4ecc0a070451e23232bb48db4322cc45602cede0"}, + {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:038ae4ffb63e3991f386e7fda85a9baab7d6617fe85b74a8f9cab190d73adb2b"}, + {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bdeb2c61611293f64ac1073f4bf6723b67d291905308a7de9bb2ca87464e3273"}, + {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:61dfa5ee9d7df297c859ac82b1226d8fefaf9c5113dc25c2c00ecad6feeeb04f"}, + {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3292d384537b9918010769b82ab3e79fca8b23d74f56fc69a679106a3e2c2cf"}, + {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f9499c70c19ff0fbe1007043acb5ad15c1dec7d8e84ab429bca8c87138e8f85c"}, + {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d3dd5523ed258ad58fed7e364c92a9360d1af8a9371e0822bd0146bdf017ef4c"}, + {file = 
"pyzmq-26.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baba2fd199b098c5544ef2536b2499d2e2155392973ad32687024bd8572a7d1c"}, + {file = "pyzmq-26.1.0-cp38-cp38-win32.whl", hash = "sha256:ddbb2b386128d8eca92bd9ca74e80f73fe263bcca7aa419f5b4cbc1661e19741"}, + {file = "pyzmq-26.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:79e45a4096ec8388cdeb04a9fa5e9371583bcb826964d55b8b66cbffe7b33c86"}, + {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:add52c78a12196bc0fda2de087ba6c876ea677cbda2e3eba63546b26e8bf177b"}, + {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c03bd7f3339ff47de7ea9ac94a2b34580a8d4df69b50128bb6669e1191a895"}, + {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dcc37d9d708784726fafc9c5e1232de655a009dbf97946f117aefa38d5985a0f"}, + {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a6ed52f0b9bf8dcc64cc82cce0607a3dfed1dbb7e8c6f282adfccc7be9781de"}, + {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451e16ae8bea3d95649317b463c9f95cd9022641ec884e3d63fc67841ae86dfe"}, + {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:906e532c814e1d579138177a00ae835cd6becbf104d45ed9093a3aaf658f6a6a"}, + {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05bacc4f94af468cc82808ae3293390278d5f3375bb20fef21e2034bb9a505b6"}, + {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:57bb2acba798dc3740e913ffadd56b1fcef96f111e66f09e2a8db3050f1f12c8"}, + {file = "pyzmq-26.1.0-cp39-cp39-win32.whl", hash = "sha256:f774841bb0e8588505002962c02da420bcfb4c5056e87a139c6e45e745c0e2e2"}, + {file = "pyzmq-26.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:359c533bedc62c56415a1f5fcfd8279bc93453afdb0803307375ecf81c962402"}, + {file = "pyzmq-26.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:7907419d150b19962138ecec81a17d4892ea440c184949dc29b358bc730caf69"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b24079a14c9596846bf7516fe75d1e2188d4a528364494859106a33d8b48be38"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59d0acd2976e1064f1b398a00e2c3e77ed0a157529779e23087d4c2fb8aaa416"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:911c43a4117915203c4cc8755e0f888e16c4676a82f61caee2f21b0c00e5b894"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10163e586cc609f5f85c9b233195554d77b1e9a0801388907441aaeb22841c5"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:28a8b2abb76042f5fd7bd720f7fea48c0fd3e82e9de0a1bf2c0de3812ce44a42"}, + {file = "pyzmq-26.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bef24d3e4ae2c985034439f449e3f9e06bf579974ce0e53d8a507a1577d5b2ab"}, + {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2cd0f4d314f4a2518e8970b6f299ae18cff7c44d4a1fc06fc713f791c3a9e3ea"}, + {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa25a620eed2a419acc2cf10135b995f8f0ce78ad00534d729aa761e4adcef8a"}, + {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef3b048822dca6d231d8a8ba21069844ae38f5d83889b9b690bf17d2acc7d099"}, + {file = "pyzmq-26.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:9a6847c92d9851b59b9f33f968c68e9e441f9a0f8fc972c5580c5cd7cbc6ee24"}, + {file = "pyzmq-26.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9b9305004d7e4e6a824f4f19b6d8f32b3578aad6f19fc1122aaf320cbe3dc83"}, + {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:63c1d3a65acb2f9c92dce03c4e1758cc552f1ae5c78d79a44e3bb88d2fa71f3a"}, + {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d36b8fffe8b248a1b961c86fbdfa0129dfce878731d169ede7fa2631447331be"}, + {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67976d12ebfd61a3bc7d77b71a9589b4d61d0422282596cf58c62c3866916544"}, + {file = "pyzmq-26.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:998444debc8816b5d8d15f966e42751032d0f4c55300c48cc337f2b3e4f17d03"}, + {file = "pyzmq-26.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5c88b2f13bcf55fee78ea83567b9fe079ba1a4bef8b35c376043440040f7edb"}, + {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d906d43e1592be4b25a587b7d96527cb67277542a5611e8ea9e996182fae410"}, + {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b0c9942430d731c786545da6be96d824a41a51742e3e374fedd9018ea43106"}, + {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:314d11564c00b77f6224d12eb3ddebe926c301e86b648a1835c5b28176c83eab"}, + {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:093a1a3cae2496233f14b57f4b485da01b4ff764582c854c0f42c6dd2be37f3d"}, + {file = "pyzmq-26.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3c397b1b450f749a7e974d74c06d69bd22dd362142f370ef2bd32a684d6b480c"}, + {file = "pyzmq-26.1.0.tar.gz", hash = "sha256:6c5aeea71f018ebd3b9115c7cb13863dd850e98ca6b9258509de1246461a7e7f"}, ] [package.dependencies] From 402404db068f3be15e407e265d9f4adee9f5479c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 04:26:30 +0000 Subject: [PATCH 003/124] Bump black from 24.4.2 to 24.8.0 Bumps [black](https://github.com/psf/black) from 24.4.2 to 24.8.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/24.4.2...24.8.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 46 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1bdf3577c..82dcdd44c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -131,33 +131,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = 
"black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] From b9c5de5163f791206b94e24e14919755eda01440 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Tue, 6 Aug 2024 11:23:51 +0100 Subject: [PATCH 004/124] Add `raw_text` to excavate and change unstructured discovery context --- bbot/modules/internal/excavate.py | 273 +++++++++++++++--------------- bbot/modules/unstructured.py | 1 + 2 files changed, 142 insertions(+), 132 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index f0286fd6b..95e8b2166 100644 --- a/bbot/modules/internal/excavate.py 
+++ b/bbot/modules/internal/excavate.py @@ -291,7 +291,7 @@ class excavateTestRule(ExcavateRule): } """ - watched_events = ["HTTP_RESPONSE"] + watched_events = ["HTTP_RESPONSE", "RAW_TEXT"] produced_events = ["URL_UNVERIFIED", "WEB_PARAMETER"] flags = ["passive"] meta = { @@ -896,137 +896,146 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules") async def handle_event(self, event): - # Harvest GET parameters from URL, if it came directly from the target, and parameter extraction is enabled - if ( - self.parameter_extraction == True - and self.url_querystring_remove == False - and str(event.parent.parent.module) == "TARGET" - ): - self.debug(f"Processing target URL [{urlunparse(event.parsed_url)}] for GET parameters") - for ( - method, - parsed_url, - parameter_name, - original_value, - regex_name, - additional_params, - ) in extract_params_url(event.parsed_url): - if self.in_bl(parameter_name) == False: - description = f"HTTP Extracted Parameter [{parameter_name}] (Target URL)" - data = { - "host": parsed_url.hostname, - "type": "GETPARAM", - "name": parameter_name, - "original_value": original_value, - "url": self.url_unparse("GETPARAM", parsed_url), - "description": description, - "additional_params": additional_params, - } - context = f"Excavate parsed a URL directly from the scan target for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) - - data = event.data - - # process response data - body = event.data.get("body", "") - headers = event.data.get("header-dict", {}) - if body == "" and headers == {}: - return - - self.assigned_cookies = {} - content_type = None - reported_location_header = False - - for header, header_values in headers.items(): - for header_value in header_values: - if header.lower() == "set-cookie": - if "=" not in header_value: - self.debug(f"Cookie found without '=': {header_value}") - continue - else: - cookie_name = header_value.split("=")[0] - cookie_value = header_value.split("=")[1].split(";")[0] - - if self.in_bl(cookie_value) == False: - self.assigned_cookies[cookie_name] = cookie_value - description = f"Set-Cookie Assigned Cookie [{cookie_name}]" - data = { - "host": str(event.host), - "type": "COOKIE", - "name": cookie_name, - "original_value": cookie_value, - "url": self.url_unparse("COOKIE", event.parsed_url), - "description": description, - } - context = f"Excavate noticed a set-cookie header for cookie [{cookie_name}] and emitted a WEB_PARAMETER for it" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) + + if event.type == "HTTP_RESPONSE": + # Harvest GET parameters from URL, if it came directly from the target, and parameter extraction is enabled + if ( + self.parameter_extraction == True + and self.url_querystring_remove == False + and str(event.parent.parent.module) == "TARGET" + ): + self.debug(f"Processing target URL [{urlunparse(event.parsed_url)}] for GET parameters") + for ( + method, + parsed_url, + parameter_name, + original_value, + regex_name, + additional_params, + ) in extract_params_url(event.parsed_url): + if self.in_bl(parameter_name) == False: + description = f"HTTP Extracted Parameter [{parameter_name}] (Target URL)" + data = { + "host": parsed_url.hostname, + "type": "GETPARAM", + "name": parameter_name, + "original_value": original_value, + "url": self.url_unparse("GETPARAM", 
parsed_url), + "description": description, + "additional_params": additional_params, + } + context = f"Excavate parsed a URL directly from the scan target for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + + data = event.data + + # process response data + body = event.data.get("body", "") + headers = event.data.get("header-dict", {}) + if body == "" and headers == {}: + return + + self.assigned_cookies = {} + content_type = None + reported_location_header = False + + for header, header_values in headers.items(): + for header_value in header_values: + if header.lower() == "set-cookie": + if "=" not in header_value: + self.debug(f"Cookie found without '=': {header_value}") + continue else: - self.debug(f"blocked cookie parameter [{cookie_name}] due to BL match") - if header.lower() == "location": - redirect_location = getattr(event, "redirect_location", "") - if redirect_location: - scheme = self.helpers.is_uri(redirect_location, return_scheme=True) - if scheme in ("http", "https"): - web_spider_distance = getattr(event, "web_spider_distance", 0) - num_redirects = max(getattr(event, "num_redirects", 0), web_spider_distance) - if num_redirects <= self.scan.web_max_redirects: - # we do not want to allow the web_spider_distance to be incremented on redirects, so we do not add spider-danger tag - url_event = self.make_event( - redirect_location, "URL_UNVERIFIED", event, tags="affiliate" - ) - if url_event is not None: - reported_location_header = True - await self.emit_event( - url_event, - context=f'excavate looked in "Location" header and found {url_event.type}: {url_event.data}', + cookie_name = header_value.split("=")[0] + cookie_value = header_value.split("=")[1].split(";")[0] + + if self.in_bl(cookie_value) == False: + self.assigned_cookies[cookie_name] = cookie_value + description = f"Set-Cookie Assigned Cookie [{cookie_name}]" + data = { + "host": str(event.host), + "type": "COOKIE", + "name": cookie_name, + "original_value": cookie_value, + "url": self.url_unparse("COOKIE", event.parsed_url), + "description": description, + } + context = f"Excavate noticed a set-cookie header for cookie [{cookie_name}] and emitted a WEB_PARAMETER for it" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + else: + self.debug(f"blocked cookie parameter [{cookie_name}] due to BL match") + if header.lower() == "location": + redirect_location = getattr(event, "redirect_location", "") + if redirect_location: + scheme = self.helpers.is_uri(redirect_location, return_scheme=True) + if scheme in ("http", "https"): + web_spider_distance = getattr(event, "web_spider_distance", 0) + num_redirects = max(getattr(event, "num_redirects", 0), web_spider_distance) + if num_redirects <= self.scan.web_max_redirects: + # we do not want to allow the web_spider_distance to be incremented on redirects, so we do not add spider-danger tag + url_event = self.make_event( + redirect_location, "URL_UNVERIFIED", event, tags="affiliate" ) + if url_event is not None: + reported_location_header = True + await self.emit_event( + url_event, + context=f'excavate looked in "Location" header and found {url_event.type}: {url_event.data}', + ) + + # Try to extract parameters from the redirect URL + if self.parameter_extraction: + + for ( + method, + parsed_url, + parameter_name, + original_value, + regex_name, + additional_params, + ) in extract_params_location(header_value, event.parsed_url): + 
if self.in_bl(parameter_name) == False: + description = f"HTTP Extracted Parameter [{parameter_name}] (Location Header)" + data = { + "host": parsed_url.hostname, + "type": "GETPARAM", + "name": parameter_name, + "original_value": original_value, + "url": self.url_unparse("GETPARAM", parsed_url), + "description": description, + "additional_params": additional_params, + } + context = f"Excavate parsed a location header for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + else: + self.warning("location header found but missing redirect_location in HTTP_RESPONSE") + if header.lower() == "content-type": + content_type = headers["content-type"][0] + + await self.search( + body, + event, + content_type, + discovery_context="HTTP response (body)", + ) - # Try to extract parameters from the redirect URL - if self.parameter_extraction: - - for ( - method, - parsed_url, - parameter_name, - original_value, - regex_name, - additional_params, - ) in extract_params_location(header_value, event.parsed_url): - if self.in_bl(parameter_name) == False: - description = f"HTTP Extracted Parameter [{parameter_name}] (Location Header)" - data = { - "host": parsed_url.hostname, - "type": "GETPARAM", - "name": parameter_name, - "original_value": original_value, - "url": self.url_unparse("GETPARAM", parsed_url), - "description": description, - "additional_params": additional_params, - } - context = f"Excavate parsed a location header for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) - else: - self.warning("location header found but missing redirect_location in HTTP_RESPONSE") - if header.lower() == "content-type": - content_type = headers["content-type"][0] - - await self.search( - body, - event, - content_type, - discovery_context="HTTP response (body)", - ) - - if reported_location_header: - # Location header should be removed if we already found and emitted a result. - # Failure to do so results in a race against the same URL extracted by the URLExtractor submodule - # If the extracted URL wins, it will cause the manual one to be a dupe, but it will have a higher web_spider_distance. - headers.pop("location") - headers_str = "\n".join(f"{k}: {v}" for k, values in headers.items() for v in values) - - await self.search( - headers_str, - event, - content_type, - discovery_context="HTTP response (headers)", - ) + if reported_location_header: + # Location header should be removed if we already found and emitted a result. + # Failure to do so results in a race against the same URL extracted by the URLExtractor submodule + # If the extracted URL wins, it will cause the manual one to be a dupe, but it will have a higher web_spider_distance. 
+ headers.pop("location") + headers_str = "\n".join(f"{k}: {v}" for k, values in headers.items() for v in values) + + await self.search( + headers_str, + event, + content_type, + discovery_context="HTTP response (headers)", + ) + else: + await self.search( + event.data, + event, + content_type="", + discovery_context="Parsed file content", + ) diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 4143ea2fd..076ef8bb5 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -102,6 +102,7 @@ async def handle_event(self, event): raw_text_event = self.make_event( content, "RAW_TEXT", + context=f"Extracted text from {file_path}", parent=event, ) await self.emit_event(raw_text_event) From 98b0b3b07d5d374afdd8b528366e6f13e698ada5 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Tue, 6 Aug 2024 14:02:54 +0100 Subject: [PATCH 005/124] Add tests and stop newlines from printing in debug logs --- bbot/core/event/base.py | 2 +- .../module_tests/test_module_excavate.py | 106 ++++++++++++++++++ 2 files changed, 107 insertions(+), 1 deletion(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index bcf41a37c..36856f426 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -859,7 +859,7 @@ def __hash__(self): def __str__(self): max_event_len = 80 - d = str(self.data) + d = str(self.data).replace("\n", "\\n") return f'{self.type}("{d[:max_event_len]}{("..." if len(d) > max_event_len else "")}", module={self.module}, tags={self.tags})' def __repr__(self): diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 2e00eacd6..196714092 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -1,7 +1,9 @@ from bbot.modules.base import BaseModule from .base import ModuleTestBase, tempwordlist + from bbot.modules.internal.excavate import ExcavateRule +from pathlib import Path import yara @@ -884,3 +886,107 @@ def check(self, module_test, events): assert found_first_cookie == True assert found_second_cookie == True + + +class TestExcavateRAWTEXT(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["excavate", "httpx", "filedownload", "unstructured"] + config_overrides = {"web": {"spider_distance": 2, "spider_depth": 2}} + + pdf_data = r"""%PDF-1.3 +%���� ReportLab Generated PDF document http://www.reportlab.com +1 0 obj +<< +/F1 2 0 R +>> +endobj +2 0 obj +<< +/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font +>> +endobj +3 0 obj +<< +/Contents 7 0 R /MediaBox [ 0 0 612 792 ] /Parent 6 0 R /Resources << +/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] +>> /Rotate 0 /Trans << + +>> + /Type /Page +>> +endobj +4 0 obj +<< +/PageMode /UseNone /Pages 6 0 R /Type /Catalog +>> +endobj +5 0 obj +<< +/Author (anonymous) /CreationDate (D:20240806124041+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240806124041+00'00') /Producer (ReportLab PDF Library - www.reportlab.com) + /Subject (unspecified) /Title (Test_PDF) /Trapped /False +>> +endobj +6 0 obj +<< +/Count 1 /Kids [ 3 0 R ] /Type /Pages +>> +endobj +7 0 obj +<< +/Filter [ /ASCII85Decode /FlateDecode ] /Length 202 +>> +stream 
+GarWq\IQJ1&-U?D?SNj'pjaJ`3fp",koMimP^kF+Mp1AE!!NP7"/-^@0_UHNek_O@uN&ABekf('dj)>^i_O#]/&nb$ij2rXK:^*p0G-Zj,3*Bcr&".&MajS22CRM"s<2d?a)N4D54IK3J2J=!)`iqRmbTV\^"I//umgp*!;>/]78#%K7'L2jVEHrg9WjGBH&A~>endstream
+endobj
+xref
+0 8
+0000000000 65535 f
+0000000073 00000 n
+0000000104 00000 n
+0000000211 00000 n
+0000000404 00000 n
+0000000472 00000 n
+0000000768 00000 n
+0000000827 00000 n
+trailer
+<<
+/ID
+[<197f3c02ab226c58c0edb7d6f675b20d><197f3c02ab226c58c0edb7d6f675b20d>]
+% ReportLab generated PDF document -- digest (http://www.reportlab.com)
+
+/Info 5 0 R
+/Root 4 0 R
+/Size 8
+>>
+startxref
+1119
+%%EOF"""
+    unstructured_response = """Link to an example website https://www.test.notreal/about
+
+Another link http://localhost:8888/admin_panel.php"""
+
+    async def setup_after_prep(self, module_test):
+        module_test.set_expect_requests(
+            dict(uri="/"),
+            dict(response_data='<a href="/Test_PDF"/>'),
+        )
+        module_test.set_expect_requests(
+            dict(uri="/Test_PDF"),
+            dict(response_data=self.pdf_data, headers={"Content-Type": "application/pdf"}),
+        )
+
+    def check(self, module_test, events):
+        filesystem_events = [e for e in events if e.type == "FILESYSTEM"]
+        assert 1 == len(filesystem_events), filesystem_events
+        filesystem_event = filesystem_events[0]
+        file = Path(filesystem_event.data["path"])
+        assert file.is_file(), "Destination file doesn't exist"
+        assert open(file).read() == self.pdf_data, f"File at {file} does not contain the correct content"
+        raw_text_events = [e for e in events if e.type == "RAW_TEXT"]
+        assert 1 == len(raw_text_events), "Failed to emit RAW_TEXT event"
+        assert (
+            raw_text_events[0].data == self.unstructured_response
+        ), f"Text extracted from PDF is incorrect, got {raw_text_events[0].data}"
+        event_data = [e.data for e in events]
+        assert "https://www.test.notreal/about" in event_data
+        assert "http://localhost:8888/admin_panel.php" in event_data

From 430a6312b77a2b1509dddede13faa8cd7ad059f1 Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Tue, 6 Aug 2024 14:47:25 +0100
Subject: [PATCH 006/124] Include test.notreal and localhost in tests

---
 bbot/test/test_step_2/module_tests/test_module_excavate.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py
index 196714092..492bbb874 100644
--- a/bbot/test/test_step_2/module_tests/test_module_excavate.py
+++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py
@@ -889,7 +889,7 @@ class TestExcavateRAWTEXT(ModuleTestBase):
-    targets = ["http://127.0.0.1:8888/"]
+    targets = ["http://127.0.0.1:8888/", "test.notreal", "http://localhost:8888/admin_panel.php"]
     modules_overrides = ["excavate", "httpx", "filedownload", "unstructured"]
     config_overrides = {"web": {"spider_distance": 2, "spider_depth": 2}}

From bc0dfb6d484d065def23915bf8ab28960c3bad6e Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 6 Aug 2024 21:46:30 +0000
Subject: [PATCH 007/124] Bump mkdocs-material from 9.5.26 to 9.5.31

Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.26 to 9.5.31.
- [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.26...9.5.31) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0433b8258..4526f18a3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1243,13 +1243,13 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.26" +version = "9.5.31" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.26-py3-none-any.whl", hash = "sha256:5d01fb0aa1c7946a1e3ae8689aa2b11a030621ecb54894e35aabb74c21016312"}, - {file = "mkdocs_material-9.5.26.tar.gz", hash = "sha256:56aeb91d94cffa43b6296fa4fbf0eb7c840136e563eecfd12c2d9e92e50ba326"}, + {file = "mkdocs_material-9.5.31-py3-none-any.whl", hash = "sha256:1b1f49066fdb3824c1e96d6bacd2d4375de4ac74580b47e79ff44c4d835c5fcb"}, + {file = "mkdocs_material-9.5.31.tar.gz", hash = "sha256:31833ec664772669f5856f4f276bf3fdf0e642a445e64491eda459249c3a1ca8"}, ] [package.dependencies] From 8b80e548460a9e65d1bc7b9cb29bf470cec2f2a7 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 7 Aug 2024 12:20:05 +0100 Subject: [PATCH 008/124] Fix unstructured --- bbot/modules/unstructured.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 076ef8bb5..6dde1f18d 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -69,6 +69,8 @@ class unstructured(BaseModule): deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"] deps_pip = ["unstructured[all-docs]"] + scope_distance_modifier = 1 + async def setup(self): self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) self.ignored_folders = self.config.get("ignore_folders", []) @@ -150,8 +152,12 @@ def extract_text(file_path): # If the file can be extracted with unstructured use its partition function or try and read it if any(file_path.lower().endswith(file_type) for file_type in unstructured_file_types): - elements = partition(filename=file_path) - return "\n\n".join(element.text for element in elements) + try: + elements = partition(filename=file_path) + return "\n\n".join(element.text for element in elements) + except ValueError: + with open(file_path, "rb") as file: + return file.read().decode("utf-8", errors="ignore") else: with open(file_path, "rb") as file: return file.read().decode("utf-8", errors="ignore") From a40da6a5c21d23b01658dceeb739366adcf24f63 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 7 Aug 2024 16:10:55 +0100 Subject: [PATCH 009/124] Make `RAW_TEXT` a dict event --- bbot/core/event/base.py | 4 ++++ bbot/modules/internal/excavate.py | 2 +- bbot/modules/unstructured.py | 8 +++++--- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 36856f426..0e81b3fa5 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1435,6 +1435,10 @@ class FILESYSTEM(DictPathEvent): pass +class RAW_TEXT(DictEvent): + pass + + class RAW_DNS_RECORD(DictHostEvent): pass diff --git a/bbot/modules/internal/excavate.py 
b/bbot/modules/internal/excavate.py index 95e8b2166..b0459a718 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -1034,7 +1034,7 @@ async def handle_event(self, event): ) else: await self.search( - event.data, + event.data.get("content", ""), event, content_type="", discovery_context="Parsed file content", diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 6dde1f18d..e485ef676 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -98,11 +98,13 @@ async def handle_event(self, event): ) await self.emit_event(file_event) elif "file" in event.tags: - file_path = event.data["path"] - content = await self.scan.helpers.run_in_executor_mp(extract_text, file_path) + file_path = Path(event.data["path"]) + filename = file_path.name + content = await self.scan.helpers.run_in_executor_mp(extract_text, str(file_path)) + event_data = {"filename": filename, "content": content} if content: raw_text_event = self.make_event( - content, + event_data, "RAW_TEXT", context=f"Extracted text from {file_path}", parent=event, From 0b490f6f6b21896001c305f62d14629646756845 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 7 Aug 2024 16:28:12 +0100 Subject: [PATCH 010/124] Revert "Make `RAW_TEXT` a dict event" This reverts commit a40da6a5c21d23b01658dceeb739366adcf24f63. --- bbot/core/event/base.py | 4 ---- bbot/modules/internal/excavate.py | 2 +- bbot/modules/unstructured.py | 8 +++----- 3 files changed, 4 insertions(+), 10 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 0e81b3fa5..36856f426 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1435,10 +1435,6 @@ class FILESYSTEM(DictPathEvent): pass -class RAW_TEXT(DictEvent): - pass - - class RAW_DNS_RECORD(DictHostEvent): pass diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index b0459a718..95e8b2166 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -1034,7 +1034,7 @@ async def handle_event(self, event): ) else: await self.search( - event.data.get("content", ""), + event.data, event, content_type="", discovery_context="Parsed file content", diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index e485ef676..6dde1f18d 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -98,13 +98,11 @@ async def handle_event(self, event): ) await self.emit_event(file_event) elif "file" in event.tags: - file_path = Path(event.data["path"]) - filename = file_path.name - content = await self.scan.helpers.run_in_executor_mp(extract_text, str(file_path)) - event_data = {"filename": filename, "content": content} + file_path = event.data["path"] + content = await self.scan.helpers.run_in_executor_mp(extract_text, file_path) if content: raw_text_event = self.make_event( - event_data, + content, "RAW_TEXT", context=f"Extracted text from {file_path}", parent=event, From 0fe898e64818ea48ec63f8d270b4cc8402341930 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 7 Aug 2024 16:48:20 +0100 Subject: [PATCH 011/124] Add a check to ensure event is a `dict` --- bbot/modules/internal/excavate.py | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 95e8b2166..ea1bed1b4 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -181,7 +181,12 @@ async def process(self, yara_results, 
event, yara_rule_settings, discovery_conte
         """
         for identifier, results in yara_results.items():
             for result in results:
-                event_data = {"host": str(event.host), "url": event.data.get("url", "")}
+                if isinstance(event.data, dict):
+                    url = event.data.get("url", "")
+                else:
+                    url = ""
+
+                event_data = {"host": str(event.host), "url": url}
                 event_data["description"] = f"{discovery_context} {yara_rule_settings.description}"
                 if yara_rule_settings.emit_match:
                     event_data["description"] += f" [{result}]"
@@ -268,7 +273,12 @@ def __init__(self, excavate):
     async def process(self, yara_results, event, yara_rule_settings, discovery_context):
         for identifier, results in yara_results.items():
             for result in results:
-                event_data = {"host": str(event.host), "url": event.data.get("url", "")}
+                if isinstance(event.data, dict):
+                    url = event.data.get("url", "")
+                else:
+                    url = ""
+
+                event_data = {"host": str(event.host), "url": url}
                 description_string = (
                     f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else ""
                 )
@@ -582,9 +592,14 @@ def __init__(self, excavate):
     async def process(self, yara_results, event, yara_rule_settings, discovery_context):
         for identifier in yara_results.keys():
             for findings in yara_results[identifier]:
+                if isinstance(event.data, dict):
+                    url = event.data.get("url", "")
+                else:
+                    url = ""
+
                 event_data = {
                     "host": str(event.host),
-                    "url": event.data.get("url", ""),
+                    "url": url,
                     "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})",
                 }
                 await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING")
@@ -614,9 +629,14 @@ def __init__(self, excavate):
     async def process(self, yara_results, event, yara_rule_settings, discovery_context):
         for identifier in yara_results.keys():
             for findings in yara_results[identifier]:
+                if isinstance(event.data, dict):
+                    url = event.data.get("url", "")
+                else:
+                    url = ""
+
                 event_data = {
                     "host": str(event.host),
-                    "url": event.data.get("url", ""),
+                    "url": url,
                     "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})",
                 }
                 await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING")
@@ -696,7 +716,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
                     final_url = url_str
                     self.excavate.debug(f"Discovered Full URL [{final_url}]")
-                elif identifier == "url_attr":
+                elif identifier == "url_attr" and hasattr(event, "parsed_url"):
                     m = await self.helpers.re.search(self.tag_attribute_regex, url_str)
                     if not m:
                         self.excavate.debug(

From 6072230efba17c6cc8630c2c7a61f1b360bb9930 Mon Sep 17 00:00:00 2001
From: Colin Stubbs <3059577+colin-stubbs@users.noreply.github.com>
Date: Thu, 8 Aug 2024 02:39:25 +1000
Subject: [PATCH 012/124] Add service_record function

---
 bbot/core/helpers/dns/helpers.py | 66 ++++++++++++++++++++++++++++++++
 1 file changed, 66 insertions(+)

diff --git a/bbot/core/helpers/dns/helpers.py b/bbot/core/helpers/dns/helpers.py
index 061ed829c..2a94421a7 100644
--- a/bbot/core/helpers/dns/helpers.py
+++ b/bbot/core/helpers/dns/helpers.py
@@ -59,3 +59,69 @@ def add_result(rdtype, _record):
     else:
         log.warning(f'Unknown DNS record type "{rdtype}"')
     return results
+
+def service_record(host, rdtype=None):
+    """
+    Indicates whether the provided host name (and optional rdtype) is an SRV or related service record.
+
+    These types of records do not (and should not) have A/AAAA/CNAME or similar records; they are simply used to advertise configuration and/or policy information for different Internet-facing services.
+
+    This function exists to provide a consistent way to perform this test, rather than duplicating code in multiple places in different modules.
+
+    The return value gives modules a quick way to test whether a host name is relevant and worth inspecting or using, in the context of what the module does.
+
+    NOTE: While underscores are technically not supposed to exist in DNS names as per the RFCs, they can be used in practice. We therefore can't assume that any name containing or starting with an underscore is a service record, and must instead check for specific strings.
+
+    Args:
+        host (string): A DNS host name
+
+    Returns:
+        bool: True indicates that the host is an SRV or similar record, False indicates that it is not.
+
+    Examples:
+        >>> service_record('_xmpp._tcp.example.com')
+        True
+
+        >>> service_record('_custom._service.example.com', 'SRV')
+        True
+
+        >>> service_record('_dmarc.example.com')
+        True
+
+        >>> service_record('www.example.com')
+        False
+    """
+
+    # if we were provided an rdtype, check if it is SRV
+    # NOTE: we don't care what the name is if rdtype == SRV
+    if rdtype and str(rdtype).upper() == "SRV":
+        return True
+
+    # we did not receive an rdtype, so we'll have to inspect the host name parts
+    parts = str(host).split(".")
+
+    # classic SRV record names, e.g. _ldap._tcp.example.com
+    if parts[1] == "_udp" or parts[1] == "_tcp":
+        return True
+
+    # TLS indicating records, used by SMTP TLS-RPT etc, e.g. _smtp._tls.example.com
+    if parts[1] == "_tls":
+        return True
+
+    # BIMI TXT records, e.g. selector._bimi.example.com
+    if parts[1] == "_bimi":
+        return True
+
+    # DKIM TXT records, e.g. selector._domainkey.example.com
+    if parts[1] == "_domainkey":
+        return True
+
+    # DMARC TXT records, e.g. _dmarc.example.com
+    if parts[0] == "_dmarc":
+        return True
+
+    # MTA-STS TXT records, e.g. _mta-sts.example.com
+    if parts[0] == "_mta-sts":
+        return True
+
+    return False

From 46cac6c6e4ce4df1746247f6e9b73d57d9326d05 Mon Sep 17 00:00:00 2001
From: Colin Stubbs <3059577+colin-stubbs@users.noreply.github.com>
Date: Thu, 8 Aug 2024 02:48:03 +1000
Subject: [PATCH 013/124] Fix black formatting requirement

---
 bbot/core/helpers/dns/helpers.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bbot/core/helpers/dns/helpers.py b/bbot/core/helpers/dns/helpers.py
index 2a94421a7..d7384934d 100644
--- a/bbot/core/helpers/dns/helpers.py
+++ b/bbot/core/helpers/dns/helpers.py
@@ -60,6 +60,7 @@ def add_result(rdtype, _record):
         log.warning(f'Unknown DNS record type "{rdtype}"')
     return results
 
+
 def service_record(host, rdtype=None):
     """
     Indicates whether the provided host name (and optional rdtype) is an SRV or related service record.
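
A minimal usage sketch of the helper added above (illustrative only, not part of any
patch; the import path follows bbot/core/helpers/dns/helpers.py and the hostnames are
made up):

    from bbot.core.helpers.dns.helpers import service_record

    # an explicit rdtype of SRV wins, regardless of the name
    assert service_record("www.example.com", "SRV") is True
    # otherwise the name itself is inspected, label by label
    assert service_record("_ldap._tcp.example.com") is True            # classic SRV naming
    assert service_record("selector1._domainkey.example.com") is True  # DKIM selector
    assert service_record("_dmarc.example.com") is True                # DMARC policy record
    assert service_record("www.example.com") is False                  # ordinary host name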
From 32cdcbc9fd1a8eade0d05f41396cdd4614bd294a Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 7 Aug 2024 18:31:55 +0100 Subject: [PATCH 014/124] Test *almost* all Excavate rules --- .../module_tests/test_module_excavate.py | 45 ++++++++++++------- 1 file changed, 30 insertions(+), 15 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 492bbb874..8e5ea5d65 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -889,7 +889,7 @@ def check(self, module_test, events): class TestExcavateRAWTEXT(ModuleTestBase): - targets = ["http://127.0.0.1:8888/", "test.notreal", "http://localhost:8888/admin_panel.php"] + targets = ["http://127.0.0.1:8888/", "test.notreal"] modules_overrides = ["excavate", "httpx", "filedownload", "unstructured"] config_overrides = {"web": {"spider_distance": 2, "spider_depth": 2}} @@ -907,7 +907,7 @@ class TestExcavateRAWTEXT(ModuleTestBase): endobj 3 0 obj << -/Contents 7 0 R /MediaBox [ 0 0 612 792 ] /Parent 6 0 R /Resources << +/Contents 7 0 R /MediaBox [ 0 0 595.2756 841.8898 ] /Parent 6 0 R /Resources << /Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] >> /Rotate 0 /Trans << @@ -922,8 +922,8 @@ class TestExcavateRAWTEXT(ModuleTestBase): endobj 5 0 obj << -/Author (anonymous) /CreationDate (D:20240806124041+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240806124041+00'00') /Producer (ReportLab PDF Library - www.reportlab.com) - /Subject (unspecified) /Title (Test_PDF) /Trapped /False +/Author (anonymous) /CreationDate (D:20240807182842+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240807182842+00'00') /Producer (ReportLab PDF Library - www.reportlab.com) + /Subject (unspecified) /Title (untitled) /Trapped /False >> endobj 6 0 obj @@ -933,10 +933,10 @@ class TestExcavateRAWTEXT(ModuleTestBase): endobj 7 0 obj << -/Filter [ /ASCII85Decode /FlateDecode ] /Length 202 +/Filter [ /ASCII85Decode /FlateDecode ] /Length 742 >> stream -GarWq\IQJ1&-U?D?SNj'pjaJ`3fp",koMimP^kF+Mp1AE!!NP7"/-^@0_UHNek_O@uN&ABekf('dj)>^i_O#]/&nb$ij2rXK:^*p0G-Zj,3*Bcr&".&MajS22CRM"s<2d?a)N4D54IK3J2J=!)`iqRmbTV\^"I//umgp*!;>/]78#%K7'L2jVEHrg9WjGBH&A~>endstream +Gas2F;0/Hc'SYHA/+V9II1V!>b>-epMEjN4$Udfu3WXha!?H`crq_UNGP5IS$'WT'SF]Hm/eEhd_JY>@!1knV$j`L/E!kN:0EQJ+FF:uKph>GV#ju48hu\;DS#c\h,:/udaV^[@;X>;"'ep>>)(B?I-n?2pLTEZKb$BFgKRF(b#Pc?SYeqN_Q<+X%64E)"g-fPCbq][OcNlQLW_hs%Z%g83]3b]0V$sluS:l]fd*^-UdD=#bCpInTen.cfe189iIh6\.p.U0GF:oK9b'->\lOqObp&ppaGMoCcp"4SVDq!<>6ZV]FD>,rrdc't<[N2!Ai12-2bU`S*gNOt?NS4WgtN@KuL)HOb>`9L>S$_ert"UNW*,("+*>]m)4`k"8SUOCpM7`cEe!(7?`JV*GMajff(^atd&EX#qdMBmI'Q(YYb&m.O>0MYJ4XfJH@("`jPF^W5.*84$HY?2JY[WU48,IqkD_]b:_615)BA3RM*]q4>2Gf_1aMGFGu.Zt]!p5h;`XYO/FCmQ4/3ZX09kH$X+QI/JJh`lb\dBu:d$%Ld1=H=-UbKXP_&26H00T.?":f@40#m]NM5JYq@VFSk+#OR+sc4eX`Oq]N([T/;kQ>>WZOJNWnM"#msq:#?Km~>endstream endobj xref 0 8 @@ -944,14 +944,14 @@ class TestExcavateRAWTEXT(ModuleTestBase): 0000000073 00000 n 0000000104 00000 n 0000000211 00000 n -0000000404 00000 n -0000000472 00000 n -0000000768 00000 n -0000000827 00000 n +0000000414 00000 n +0000000482 00000 n +0000000778 00000 n +0000000837 00000 n trailer << /ID -[<197f3c02ab226c58c0edb7d6f675b20d><197f3c02ab226c58c0edb7d6f675b20d>] +[<3c7340500fa2fe72523c5e6f07511599><3c7340500fa2fe72523c5e6f07511599>] % ReportLab generated PDF document -- digest (http://www.reportlab.com) /Info 5 0 R 
@@ -959,11 +959,17 @@ class TestExcavateRAWTEXT(ModuleTestBase):
 /Size 8
 >>
 startxref
-1119
+1669
 %%EOF"""
-    unstructured_response = """Link to an example website https://www.test.notreal/about
+    unstructured_response = """This is an email example@blacklanternsecurity.notreal
+
+An example JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c
+
+A serialized DOTNET object AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA==
+
+A full url https://www.test.notreal/about
 
-Another link http://localhost:8888/admin_panel.php"""
+A href <a href="/donot_detect.js">Click me</a>"""
 
     async def setup_after_prep(self, module_test):
         module_test.set_expect_requests(
@@ -988,5 +994,14 @@ def check(self, module_test, events):
             raw_text_events[0].data == self.unstructured_response
         ), f"Text extracted from PDF is incorrect, got {raw_text_events[0].data}"
         event_data = [e.data for e in events]
+        assert "example@blacklanternsecurity.notreal" in event_data
+        assert (
+            "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c"
+            in event_data
+        )
+        assert (
+            "AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA=="
+            in event_data
+        )
         assert "https://www.test.notreal/about" in event_data
-        assert "http://localhost:8888/admin_panel.php" in event_data
+        assert "/donot_detect.js" not in event_data

From 3b67328ae1135edf4df89f933efe1b8b8c28bab1 Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Wed, 7 Aug 2024 18:41:55 +0100
Subject: [PATCH 015/124] Fix final_url not defined error

---
 bbot/modules/internal/excavate.py | 26 ++++++++++++++------------
 1 file changed, 14 insertions(+), 12 deletions(-)

diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py
index ea1bed1b4..ec0c0528a 100644
--- a/bbot/modules/internal/excavate.py
+++ b/bbot/modules/internal/excavate.py
@@ -706,6 +706,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
 
         for identifier, results in yara_results.items():
             urls_found = 0
+            final_url = ""
             for url_str in results:
                 if identifier == "url_full":
                     if not await self.helpers.re.search(self.full_url_regex, url_str):
@@ -734,18 +735,19 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
                         self.excavate.debug(
                             f"Reconstructed Full URL [{final_url}] from extracted relative URL [{unescaped_url}] "
                         )
-
-                if self.excavate.scan.in_scope(final_url):
-                    urls_found += 1
-
-                await self.report(
-                    final_url,
-                    event,
-                    yara_rule_settings,
-                    discovery_context,
-                    event_type="URL_UNVERIFIED",
-                    urls_found=urls_found,
-                )
+
+                if final_url:
+                    if self.excavate.scan.in_scope(final_url):
+                        urls_found += 1
+
+                    await self.report(
+                        final_url,
+                        event,
+                        yara_rule_settings,
+                        discovery_context,
+                        event_type="URL_UNVERIFIED",
+                        urls_found=urls_found,
+                    )
 
     async def
report_prep(self, event_data, event_type, event, tags, **kwargs): event_draft = self.excavate.make_event(event_data, event_type, parent=event) From 3e6109b469c3d81826528ddf5858f50dd4464ee6 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 7 Aug 2024 18:43:10 +0100 Subject: [PATCH 016/124] lint --- bbot/modules/internal/excavate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index ec0c0528a..a19daf940 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -735,7 +735,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte self.excavate.debug( f"Reconstructed Full URL [{final_url}] from extracted relative URL [{unescaped_url}] " ) - + if final_url: if self.excavate.scan.in_scope(final_url): urls_found += 1 From 2a1fed4ea236844fdfb6ddb5ca4ff72603fbfbff Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 7 Aug 2024 19:29:27 +0100 Subject: [PATCH 017/124] Increase scope distance --- bbot/test/test_step_2/module_tests/test_module_excavate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 8e5ea5d65..7938ccf80 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -891,7 +891,7 @@ def check(self, module_test, events): class TestExcavateRAWTEXT(ModuleTestBase): targets = ["http://127.0.0.1:8888/", "test.notreal"] modules_overrides = ["excavate", "httpx", "filedownload", "unstructured"] - config_overrides = {"web": {"spider_distance": 2, "spider_depth": 2}} + config_overrides = {"scope": {"report_distance": 1}, "web": {"spider_distance": 2, "spider_depth": 2}} pdf_data = r"""%PDF-1.3 %���� ReportLab Generated PDF document http://www.reportlab.com From 52d53f5e813110f2072ec7eab592a31c7cfd5a9d Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 9 Aug 2024 14:39:22 +0100 Subject: [PATCH 018/124] Closes #1642 --- bbot/modules/trufflehog.py | 64 ++++++++++++++++++++++++++++++++------ 1 file changed, 54 insertions(+), 10 deletions(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index e0ff0fd2a..37e4dcbdf 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -3,7 +3,7 @@ class trufflehog(BaseModule): - watched_events = ["FILESYSTEM"] + watched_events = ["CODE_REPOSITORY", "FILESYSTEM"] produced_events = ["FINDING", "VULNERABILITY"] flags = ["passive", "safe", "code-enum"] meta = { @@ -13,14 +13,16 @@ class trufflehog(BaseModule): } options = { - "version": "3.75.1", + "version": "3.81.7", "only_verified": True, "concurrency": 8, + "deleted_forks": False, } options_desc = { "version": "trufflehog version", "only_verified": "Only report credentials that have been verified", "concurrency": "Number of concurrent workers", + "deleted_forks": "Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours.", } deps_ansible = [ { @@ -39,17 +41,53 @@ class trufflehog(BaseModule): async def setup(self): self.verified = self.config.get("only_verified", True) self.concurrency = int(self.config.get("concurrency", 8)) + + self.deleted_forks = self.config.get("deleted_forks", False) + self.github_token = "" + if self.deleted_forks: + self.warning(f"Deleted forks is enabled. 
Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours.") + for module_name in ("github", "github_codesearch", "github_org", "git_clone"): + module_config = self.scan.config.get("modules", {}).get(module_name, {}) + api_key = module_config.get("api_key", "") + if api_key: + self.github_token = api_key + break + + # soft-fail if we don't have a github token as well + if not self.github_token: + self.deleted_forks = False + return None, "A github api_key must be provided to the github modules for deleted forks to be scanned" + return True + + async def filter_event(self, event): + if event.type == "CODE_REPOSITORY": + if self.deleted_forks: + if "git" not in event.tags: + return False, "Module only accepts git CODE_REPOSITORY events" + if "github" not in event.data["url"]: + return False, "Module only accepts github CODE_REPOSITORY events" + else: + return False, "Deleted forks is not enabled" return True async def handle_event(self, event): - path = event.data["path"] description = event.data.get("description", "") - if "git" in event.tags: - module = "git" - elif "docker" in event.tags: - module = "docker" + if event.type == "CODE_REPOSITORY": + path = event.data["url"] + if "git" in event.tags: + module = "github-experimental" + else: + path = event.data["path"] + if "git" in event.tags: + module = "git" + elif "docker" in event.tags: + module = "docker" + else: + module = "filesystem" + if event.type == "CODE_REPOSITORY": + host = event.host else: - module = "filesystem" + host = str(event.parent.host) async for decoder_name, detector_name, raw_result, verified, source_metadata in self.execute_trufflehog( module, path ): @@ -57,7 +95,7 @@ async def handle_event(self, event): data = { "severity": "High", "description": f"Verified Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]", - "host": str(event.parent.host), + "host": host, } if description: data["description"] += f" Description: [{description}]" @@ -70,7 +108,7 @@ async def handle_event(self, event): else: data = { "description": f"Potential Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]", - "host": str(event.parent.host), + "host": host, } if description: data["description"] += f" Description: [{description}]" @@ -99,6 +137,12 @@ async def execute_trufflehog(self, module, path): elif module == "filesystem": command.append("filesystem") command.append(path) + elif module == "github-experimental": + command.append("github-experimental") + command.append("--repo=" + path) + command.append("--object-discovery") + command.append("--delete-cached-data") + command.append("--token="+ self.github_token) stats_file = self.helpers.tempfile_tail(callback=self.log_trufflehog_status) try: From d7977673a8549896001cf80e91309eacd2d74a3f Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 9 Aug 2024 14:48:25 +0100 Subject: [PATCH 019/124] Lint --- bbot/modules/trufflehog.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index 37e4dcbdf..875aa4f2a 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -45,20 +45,22 @@ async def setup(self): self.deleted_forks = self.config.get("deleted_forks", False) self.github_token = "" if self.deleted_forks: - self.warning(f"Deleted forks is enabled. 
Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours.") + self.warning( + f"Deleted forks is enabled. Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours." + ) for module_name in ("github", "github_codesearch", "github_org", "git_clone"): module_config = self.scan.config.get("modules", {}).get(module_name, {}) api_key = module_config.get("api_key", "") if api_key: self.github_token = api_key break - + # soft-fail if we don't have a github token as well if not self.github_token: self.deleted_forks = False return None, "A github api_key must be provided to the github modules for deleted forks to be scanned" return True - + async def filter_event(self, event): if event.type == "CODE_REPOSITORY": if self.deleted_forks: @@ -142,7 +144,7 @@ async def execute_trufflehog(self, module, path): command.append("--repo=" + path) command.append("--object-discovery") command.append("--delete-cached-data") - command.append("--token="+ self.github_token) + command.append("--token=" + self.github_token) stats_file = self.helpers.tempfile_tail(callback=self.log_trufflehog_status) try: From 45c45c3ef2ef87c1d1e7f0235aef6e51a93b04bd Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 11 Aug 2024 16:58:44 -0400 Subject: [PATCH 020/124] make dnsbrute active --- bbot/modules/dnsbrute.py | 2 +- bbot/modules/dnsbrute_mutations.py | 6 +++++- bbot/modules/dnscommonsrv.py | 3 +-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/bbot/modules/dnsbrute.py b/bbot/modules/dnsbrute.py index 76e2d1804..3b847933c 100644 --- a/bbot/modules/dnsbrute.py +++ b/bbot/modules/dnsbrute.py @@ -2,7 +2,7 @@ class dnsbrute(subdomain_enum): - flags = ["subdomain-enum", "passive", "aggressive"] + flags = ["subdomain-enum", "active", "aggressive"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] meta = { diff --git a/bbot/modules/dnsbrute_mutations.py b/bbot/modules/dnsbrute_mutations.py index 78513fc2d..ef0b7a033 100644 --- a/bbot/modules/dnsbrute_mutations.py +++ b/bbot/modules/dnsbrute_mutations.py @@ -2,7 +2,7 @@ class dnsbrute_mutations(BaseModule): - flags = ["subdomain-enum", "passive", "aggressive", "slow"] + flags = ["subdomain-enum", "active", "aggressive", "slow"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] meta = { @@ -45,6 +45,10 @@ def get_parent_event(self, subdomain): return self.parent_events[parent_host] async def finish(self): + """ + TODO: speed up this loop. + We should see if we can combine multiple runs together instead of running them each individually. 
+ """ found = sorted(self.found.items(), key=lambda x: len(x[-1]), reverse=True) # if we have a lot of rounds to make, don't try mutations on less-populated domains trimmed_found = [] diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index 819e4967b..c039f8958 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -135,7 +135,6 @@ "_afpovertcp._tcp", # 10 "_collab-edge._tls", # 6 "_tcp", # 5 - "_wildcard", # 3 "_client._smtp", # 3 "_udp", # 2 "_tls", # 2 @@ -153,7 +152,7 @@ class dnscommonsrv(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - flags = ["subdomain-enum", "passive", "safe"] + flags = ["subdomain-enum", "active", "safe"] meta = {"description": "Check for common SRV records", "created_date": "2022-05-15", "author": "@TheTechromancer"} dedup_strategy = "lowest_parent" From f0682275e90e911efcba74c8a8981108f398359f Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 11 Aug 2024 17:05:28 -0400 Subject: [PATCH 021/124] fixing tests --- bbot/test/test_step_1/test_cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py index 52b3867fe..be861b8a2 100644 --- a/bbot/test/test_step_1/test_cli.py +++ b/bbot/test/test_step_1/test_cli.py @@ -268,7 +268,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): result = await cli._main() out, err = capsys.readouterr() assert result == None - assert "| dnsbrute " in out + assert "| chaos " in out assert not "| httpx " in out # list modules by flag + excluded flag @@ -276,7 +276,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): result = await cli._main() out, err = capsys.readouterr() assert result == None - assert "| dnsbrute " in out + assert "| chaos " in out assert not "| httpx " in out # list modules by flag + excluded module From 9c9d87cfe1ebeab6b02e86fb933bc7e2c1dc8a6c Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 11 Aug 2024 17:40:43 -0400 Subject: [PATCH 022/124] more work on tests --- bbot/test/test_step_1/test_presets.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/bbot/test/test_step_1/test_presets.py b/bbot/test/test_step_1/test_presets.py index 768ee3429..4488318b6 100644 --- a/bbot/test/test_step_1/test_presets.py +++ b/bbot/test/test_step_1/test_presets.py @@ -826,8 +826,8 @@ def get_module_flags(p): module_flags = list(get_module_flags(preset)) dnsbrute_flags = preset.preloaded_module("dnsbrute").get("flags", []) assert "subdomain-enum" in dnsbrute_flags - assert "passive" in dnsbrute_flags - assert not "active" in dnsbrute_flags + assert "active" in dnsbrute_flags + assert not "passive" in dnsbrute_flags assert "aggressive" in dnsbrute_flags assert not "safe" in dnsbrute_flags assert "dnsbrute" in [x[0] for x in module_flags] @@ -842,7 +842,8 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], require_flags=["passive"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert "dnsbrute" in [x[0] for x in module_flags] + assert "chaos" in [x[0] for x in module_flags] + assert not "httpx" in [x[0] for x in module_flags] assert all("passive" in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) @@ -852,7 +853,8 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], 
exclude_flags=["active"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert "dnsbrute" in [x[0] for x in module_flags] + assert "chaos" in [x[0] for x in module_flags] + assert not "httpx" in [x[0] for x in module_flags] assert all("passive" in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) @@ -863,6 +865,7 @@ def get_module_flags(p): assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) assert not "dnsbrute" in [x[0] for x in module_flags] + assert "httpx" in [x[0] for x in module_flags] assert any("passive" in flags for module, flags in module_flags) assert any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) From 6d262040a4416ccdd8dbdf7a27945f5663327fe0 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 11 Aug 2024 23:13:57 -0400 Subject: [PATCH 023/124] allow version updater to trigger tests --- .github/workflows/version_updater.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/version_updater.yaml b/.github/workflows/version_updater.yaml index 4ffebef35..3a67e0c3f 100644 --- a/.github/workflows/version_updater.yaml +++ b/.github/workflows/version_updater.yaml @@ -44,7 +44,7 @@ jobs: if: steps.update-version.outcome == 'success' uses: peter-evans/create-pull-request@v5 with: - token: ${{ secrets.GITHUB_TOKEN }} + token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} commit-message: "Update nuclei" title: "Update nuclei to ${{ env.latest_version }}" body: | @@ -94,7 +94,7 @@ jobs: if: steps.update-version.outcome == 'success' uses: peter-evans/create-pull-request@v5 with: - token: ${{ secrets.GITHUB_TOKEN }} + token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} commit-message: "Update trufflehog" title: "Update trufflehog to ${{ env.latest_version }}" body: | From 50d85cd06afebfe5d6e46bf6e7c2579cdf6ab95a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Aug 2024 04:14:08 +0000 Subject: [PATCH 024/124] Bump lxml from 5.2.2 to 5.3.0 Bumps [lxml](https://github.com/lxml/lxml) from 5.2.2 to 5.3.0. - [Release notes](https://github.com/lxml/lxml/releases) - [Changelog](https://github.com/lxml/lxml/blob/master/CHANGES.txt) - [Commits](https://github.com/lxml/lxml/compare/lxml-5.2.2...lxml-5.3.0) --- updated-dependencies: - dependency-name: lxml dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 284 ++++++++++++++++++++++++++-------------------------- 1 file changed, 140 insertions(+), 144 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6d655f9ab..f366a686a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -871,153 +871,149 @@ files = [ [[package]] name = "lxml" -version = "5.2.2" +version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, - {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, - {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, - {file = 
"lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, - {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, - {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, - {file = 
"lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, - {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, - {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, - {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = 
"sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, - {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, - {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, - {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, - {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, - {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, - {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, - {file = 
"lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, - {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, - {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, - {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, - {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, - {file = 
"lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, - {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = 
"sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = 
"sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, ] [package.extras] @@ -1025,7 +1021,7 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.10)"] +source = ["Cython (>=3.0.11)"] [[package]] name = "markdown" From 661b3ff9eb82ce85f689cc258a49d159992a6816 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Aug 2024 04:14:46 +0000 Subject: [PATCH 025/124] Bump pytest-httpserver from 1.0.11 to 1.1.0 Bumps [pytest-httpserver](https://github.com/csernazs/pytest-httpserver) from 1.0.11 to 1.1.0. - [Release notes](https://github.com/csernazs/pytest-httpserver/releases) - [Changelog](https://github.com/csernazs/pytest-httpserver/blob/master/CHANGES.rst) - [Commits](https://github.com/csernazs/pytest-httpserver/compare/1.0.11...1.1.0) --- updated-dependencies: - dependency-name: pytest-httpserver dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6d655f9ab..339a2d34b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1945,13 +1945,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "pytest-mock (>=3.12)"] [[package]] name = "pytest-httpserver" -version = "1.0.11" +version = "1.1.0" description = "pytest-httpserver is a httpserver for pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest_httpserver-1.0.11-py3-none-any.whl", hash = "sha256:87e0017aa563b4fb9688822827495dd84d5dd0cae189ef0fa533ba693471fe6e"}, - {file = "pytest_httpserver-1.0.11.tar.gz", hash = "sha256:834531f278e99a22cf3920bc30fe06784518b742a189628b3b34953604ad83f6"}, + {file = "pytest_httpserver-1.1.0-py3-none-any.whl", hash = "sha256:7ef88be8ed3354b6784daa3daa75a422370327c634053cefb124903fa8d73a41"}, + {file = "pytest_httpserver-1.1.0.tar.gz", hash = "sha256:6b1cb0199e2ed551b1b94d43f096863bbf6ae5bcd7c75c2c06845e5ce2dc8701"}, ] [package.dependencies] From 88ab7c85a823666d40b3147aa3874b210b06bf3e Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 12 Aug 2024 13:10:04 -0400 Subject: [PATCH 026/124] relocate common SRV records, add tests --- bbot/core/helpers/dns/helpers.py | 170 ++++++++++++++++++++++++++++-- bbot/modules/dnscommonsrv.py | 157 +-------------------------- bbot/test/test_step_1/test_dns.py | 16 ++- 3 files changed, 182 insertions(+), 161 deletions(-) diff --git a/bbot/core/helpers/dns/helpers.py b/bbot/core/helpers/dns/helpers.py index d7384934d..c18a2c162 100644 --- a/bbot/core/helpers/dns/helpers.py +++ b/bbot/core/helpers/dns/helpers.py @@ -6,6 +6,154 @@ log = logging.getLogger("bbot.core.helpers.dns") +# the following are the result of a 1-day internet survey to find the top SRV records +# the scan resulted in 36,282 SRV records. the count for each one is shown. 
+common_srvs = [ + "_sipfederationtls._tcp", # 6909 + "_sip._tls", # 6853 + "_autodiscover._tcp", # 4268 + "_xmpp-server._tcp", # 1437 + "_sip._tcp", # 1193 + "_sips._tcp", # 1183 + "_caldavs._tcp", # 1179 + "_carddavs._tcp", # 1132 + "_caldav._tcp", # 1035 + "_carddav._tcp", # 1024 + "_sip._udp", # 1007 + "_imaps._tcp", # 1007 + "_submission._tcp", # 906 + "_h323cs._tcp", # 846 + "_h323ls._udp", # 782 + "_xmpp-client._tcp", # 689 + "_pop3s._tcp", # 394 + "_jabber._tcp", # 277 + "_imap._tcp", # 267 + "_turn._udp", # 256 + "_pop3._tcp", # 221 + "_ldap._tcp", # 213 + "_smtps._tcp", # 195 + "_sipinternaltls._tcp", # 192 + "_vlmcs._tcp", # 165 + "_kerberos._udp", # 163 + "_kerberos._tcp", # 148 + "_kpasswd._udp", # 128 + "_kpasswd._tcp", # 100 + "_ntp._udp", # 90 + "_gc._tcp", # 73 + "_kerberos-master._udp", # 66 + "_ldap._tcp.dc._msdcs", # 63 + "_matrix._tcp", # 62 + "_smtp._tcp", # 61 + "_stun._udp", # 57 + "_kerberos._tcp.dc._msdcs", # 54 + "_ldap._tcp.gc._msdcs", # 49 + "_kerberos-adm._tcp", # 44 + "_ldap._tcp.pdc._msdcs", # 43 + "_kerberos-master._tcp", # 43 + "_http._tcp", # 37 + "_h323rs._tcp", # 36 + "_sipinternal._tcp", # 35 + "_turn._tcp", # 33 + "_stun._tcp", # 33 + "_h323ls._tcp", # 33 + "_x-puppet._tcp", # 30 + "_h323cs._udp", # 27 + "_stuns._tcp", # 26 + "_jabber-client._tcp", # 25 + "_x-puppet-ca._tcp", # 22 + "_ts3._udp", # 22 + "_minecraft._tcp", # 22 + "_turns._tcp", # 21 + "_ldaps._tcp", # 21 + "_xmpps-client._tcp", # 20 + "_https._tcp", # 19 + "_ftp._tcp", # 19 + "_xmpp-server._udp", # 18 + "_xmpp-client._udp", # 17 + "_jabber._udp", # 17 + "_jabber-client._udp", # 17 + "_xmpps-server._tcp", # 15 + "_finger._tcp", # 14 + "_stuns._udp", # 12 + "_hkp._tcp", # 12 + "_vlmcs._udp", # 11 + "_turns._udp", # 11 + "_tftp._udp", # 11 + "_ssh._tcp", # 11 + "_rtps._udp", # 11 + "_mysqlsrv._tcp", # 11 + "_hkps._tcp", # 11 + "_h323be._udp", # 11 + "_dns._tcp", # 11 + "_wss._tcp", # 10 + "_wpad._tcp", # 10 + "_whois._tcp", # 10 + "_webexconnect._tcp", # 10 + "_webexconnects._tcp", # 10 + "_vnc._tcp", # 10 + "_test._tcp", # 10 + "_telnet._tcp", # 10 + "_telnets._tcp", # 10 + "_teamspeak._tcp", # 10 + "_svns._tcp", # 10 + "_svcp._tcp", # 10 + "_smb._tcp", # 10 + "_sip-tls._tcp", # 10 + "_sftp._tcp", # 10 + "_secure-pop3._tcp", # 10 + "_secure-imap._tcp", # 10 + "_rtsp._tcp", # 10 + "_rtps._tcp", # 10 + "_rpc._tcp", # 10 + "_rfb._tcp", # 10 + "_raop._tcp", # 10 + "_pstn._tcp", # 10 + "_presence._tcp", # 10 + "_pkixrep._tcp", # 10 + "_pgprevokations._tcp", # 10 + "_pgpkeys._tcp", # 10 + "_ocsp._tcp", # 10 + "_nntp._tcp", # 10 + "_nfs._tcp", # 10 + "_netbios-ssn._tcp", # 10 + "_netbios-ns._tcp", # 10 + "_netbios-dgm._tcp", # 10 + "_mumble._tcp", # 10 + "_msrpc._tcp", # 10 + "_mqtts._tcp", # 10 + "_minecraft._udp", # 10 + "_iscsi._tcp", # 10 + "_ircs._tcp", # 10 + "_ipp._tcp", # 10 + "_ipps._tcp", # 10 + "_h323be._tcp", # 10 + "_gits._tcp", # 10 + "_ftps._tcp", # 10 + "_ftpes._tcp", # 10 + "_dnss._udp", # 10 + "_dnss._tcp", # 10 + "_diameter._tcp", # 10 + "_crl._tcp", # 10 + "_crls._tcp", # 10 + "_cmp._tcp", # 10 + "_certificates._tcp", # 10 + "_aix._tcp", # 10 + "_afpovertcp._tcp", # 10 + "_collab-edge._tls", # 6 + "_tcp", # 5 + "_client._smtp", # 3 + "_udp", # 2 + "_tls", # 2 + "_msdcs", # 2 + "_gc._msdcs", # 2 + "_ldaps._tcp.dc._msdcs", # 1 + "_kerberos._tcp.kdc._msdcs", # 1 + "_kerberos.tcp.dc._msdcs", # 1 + "_imap", # 1 + "_iax", # 1 +] + + def extract_targets(record): """ Extracts hostnames or IP addresses from a given DNS record. 
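
For illustration only (not part of this patch): consumers of the relocated list expand it by joining each prefix onto a target domain, which is what the dnscommonsrv module further down does via helpers.dns.brute. A minimal Python sketch, assuming the import path added above; srv_candidates() is a hypothetical helper, not a BBOT API:

    # sketch: expand common_srvs into candidate SRV names for one domain
    from bbot.core.helpers.dns.helpers import common_srvs

    def srv_candidates(domain):
        # "_sip._tls" + "example.com" -> "_sip._tls.example.com"
        return [f"{srv}.{domain}" for srv in common_srvs]

    print(srv_candidates("example.com")[:2])
    # ['_sipfederationtls._tcp.example.com', '_sip._tls.example.com']
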
@@ -101,6 +249,20 @@ def service_record(host, rdtype=None): # we did not receive rdtype, so we'll have to inspect host name parts parts = str(host).split(".") + if not parts: + return False + + # DMARC TXT records, e.g. _dmarc.example.com + if parts[0] == "_dmarc": + return True + + # MTA-STS TXT records, e.g. _mta-sts.example.com + if parts[0] == "_mta-sts": + return True + + if len(parts) < 2: + return False + # classic SRV record names, e.g. _ldap._tcp.example.com if parts[1] == "_udp" or parts[1] == "_tcp": return True @@ -117,12 +279,4 @@ def service_record(host, rdtype=None): if parts[1] == "_domainkey": return True - # DMARC TXT records, e.g. _dmarc.example.com - if parts[0] == "_dmarc": - return True - - # MTA-STS TXT records, e.g. _mta-sts.example.com - if parts[0] == "_mta-sts": - return True - return False diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index 819e4967b..5fb279b91 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -1,160 +1,12 @@ +from bbot.core.helpers.dns.helpers import common_srvs from bbot.modules.templates.subdomain_enum import subdomain_enum -# the following are the result of a 1-day internet survey to find the top SRV records -# the scan resulted in 36,282 SRV records. the count for each one is shown. -common_srvs = [ - "_sipfederationtls._tcp", # 6909 - "_sip._tls", # 6853 - "_autodiscover._tcp", # 4268 - "_xmpp-server._tcp", # 1437 - "_sip._tcp", # 1193 - "_sips._tcp", # 1183 - "_caldavs._tcp", # 1179 - "_carddavs._tcp", # 1132 - "_caldav._tcp", # 1035 - "_carddav._tcp", # 1024 - "_sip._udp", # 1007 - "_imaps._tcp", # 1007 - "_submission._tcp", # 906 - "_h323cs._tcp", # 846 - "_h323ls._udp", # 782 - "_xmpp-client._tcp", # 689 - "_pop3s._tcp", # 394 - "_jabber._tcp", # 277 - "_imap._tcp", # 267 - "_turn._udp", # 256 - "_pop3._tcp", # 221 - "_ldap._tcp", # 213 - "_smtps._tcp", # 195 - "_sipinternaltls._tcp", # 192 - "_vlmcs._tcp", # 165 - "_kerberos._udp", # 163 - "_kerberos._tcp", # 148 - "_kpasswd._udp", # 128 - "_kpasswd._tcp", # 100 - "_ntp._udp", # 90 - "_gc._tcp", # 73 - "_kerberos-master._udp", # 66 - "_ldap._tcp.dc._msdcs", # 63 - "_matrix._tcp", # 62 - "_smtp._tcp", # 61 - "_stun._udp", # 57 - "_kerberos._tcp.dc._msdcs", # 54 - "_ldap._tcp.gc._msdcs", # 49 - "_kerberos-adm._tcp", # 44 - "_ldap._tcp.pdc._msdcs", # 43 - "_kerberos-master._tcp", # 43 - "_http._tcp", # 37 - "_h323rs._tcp", # 36 - "_sipinternal._tcp", # 35 - "_turn._tcp", # 33 - "_stun._tcp", # 33 - "_h323ls._tcp", # 33 - "_x-puppet._tcp", # 30 - "_h323cs._udp", # 27 - "_stuns._tcp", # 26 - "_jabber-client._tcp", # 25 - "_x-puppet-ca._tcp", # 22 - "_ts3._udp", # 22 - "_minecraft._tcp", # 22 - "_turns._tcp", # 21 - "_ldaps._tcp", # 21 - "_xmpps-client._tcp", # 20 - "_https._tcp", # 19 - "_ftp._tcp", # 19 - "_xmpp-server._udp", # 18 - "_xmpp-client._udp", # 17 - "_jabber._udp", # 17 - "_jabber-client._udp", # 17 - "_xmpps-server._tcp", # 15 - "_finger._tcp", # 14 - "_stuns._udp", # 12 - "_hkp._tcp", # 12 - "_vlmcs._udp", # 11 - "_turns._udp", # 11 - "_tftp._udp", # 11 - "_ssh._tcp", # 11 - "_rtps._udp", # 11 - "_mysqlsrv._tcp", # 11 - "_hkps._tcp", # 11 - "_h323be._udp", # 11 - "_dns._tcp", # 11 - "_wss._tcp", # 10 - "_wpad._tcp", # 10 - "_whois._tcp", # 10 - "_webexconnect._tcp", # 10 - "_webexconnects._tcp", # 10 - "_vnc._tcp", # 10 - "_test._tcp", # 10 - "_telnet._tcp", # 10 - "_telnets._tcp", # 10 - "_teamspeak._tcp", # 10 - "_svns._tcp", # 10 - "_svcp._tcp", # 10 - "_smb._tcp", # 10 - "_sip-tls._tcp", # 10 - "_sftp._tcp", # 10 - 
"_secure-pop3._tcp", # 10 - "_secure-imap._tcp", # 10 - "_rtsp._tcp", # 10 - "_rtps._tcp", # 10 - "_rpc._tcp", # 10 - "_rfb._tcp", # 10 - "_raop._tcp", # 10 - "_pstn._tcp", # 10 - "_presence._tcp", # 10 - "_pkixrep._tcp", # 10 - "_pgprevokations._tcp", # 10 - "_pgpkeys._tcp", # 10 - "_ocsp._tcp", # 10 - "_nntp._tcp", # 10 - "_nfs._tcp", # 10 - "_netbios-ssn._tcp", # 10 - "_netbios-ns._tcp", # 10 - "_netbios-dgm._tcp", # 10 - "_mumble._tcp", # 10 - "_msrpc._tcp", # 10 - "_mqtts._tcp", # 10 - "_minecraft._udp", # 10 - "_iscsi._tcp", # 10 - "_ircs._tcp", # 10 - "_ipp._tcp", # 10 - "_ipps._tcp", # 10 - "_h323be._tcp", # 10 - "_gits._tcp", # 10 - "_ftps._tcp", # 10 - "_ftpes._tcp", # 10 - "_dnss._udp", # 10 - "_dnss._tcp", # 10 - "_diameter._tcp", # 10 - "_crl._tcp", # 10 - "_crls._tcp", # 10 - "_cmp._tcp", # 10 - "_certificates._tcp", # 10 - "_aix._tcp", # 10 - "_afpovertcp._tcp", # 10 - "_collab-edge._tls", # 6 - "_tcp", # 5 - "_wildcard", # 3 - "_client._smtp", # 3 - "_udp", # 2 - "_tls", # 2 - "_msdcs", # 2 - "_gc._msdcs", # 2 - "_ldaps._tcp.dc._msdcs", # 1 - "_kerberos._tcp.kdc._msdcs", # 1 - "_kerberos.tcp.dc._msdcs", # 1 - "_imap", # 1 - "_iax", # 1 -] -num_srvs = len(common_srvs) - class dnscommonsrv(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Check for common SRV records", "created_date": "2022-05-15", "author": "@TheTechromancer"} + meta = {"description": "", "created_date": "2022-05-15", "author": "@TheTechromancer"} dedup_strategy = "lowest_parent" options = {"max_depth": 2} @@ -162,6 +14,7 @@ class dnscommonsrv(subdomain_enum): async def setup(self): self.max_subdomain_depth = self.config.get("max_depth", 2) + self.num_srvs = len(common_srvs) return True async def filter_event(self, event): @@ -172,11 +25,11 @@ async def filter_event(self, event): async def handle_event(self, event): query = self.make_query(event) - self.verbose(f'Brute-forcing {num_srvs:,} SRV records for "{query}"') + self.verbose(f'Brute-forcing {self.num_srvs:,} SRV records for "{query}"') for hostname in await self.helpers.dns.brute(self, query, common_srvs, type="SRV"): await self.emit_event( hostname, "DNS_NAME", parent=event, - context=f'{{module}} tried {num_srvs:,} common SRV records against "{query}" and found {{event.type}}: {{event.data}}', + context=f'{{module}} tried {self.num_srvs:,} common SRV records against "{query}" and found {{event.type}}: {{event.data}}', ) diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py index 5f98f4939..58e877b9f 100644 --- a/bbot/test/test_step_1/test_dns.py +++ b/bbot/test/test_step_1/test_dns.py @@ -1,6 +1,6 @@ from ..bbot_fixtures import * -from bbot.core.helpers.dns.helpers import extract_targets +from bbot.core.helpers.dns.helpers import extract_targets, service_record, common_srvs mock_records = { @@ -438,6 +438,7 @@ async def handle_event(self, event): ) +<<<<<<< Updated upstream @pytest.mark.asyncio async def test_dns_graph_structure(bbot_scanner): scan = bbot_scanner("https://evilcorp.com", config={"dns": {"search_distance": 1, "minimal": False}}) @@ -464,3 +465,16 @@ async def test_dns_graph_structure(bbot_scanner): assert str(events_by_data["www.evilcorp.com"].module) == "CNAME" assert events_by_data["evilcorp.com"].parent.data == "https://evilcorp.com/" assert str(events_by_data["evilcorp.com"].module) == "host" +======= +def test_dns_helpers(): + assert service_record("") == False + assert service_record("localhost") == 
False + assert service_record("www.example.com") == False + assert service_record("www.example.com", "SRV") == True + assert service_record("_custom._service.example.com", "SRV") == True + assert service_record("_custom._service.example.com", "A") == False + # top 100 most common SRV records + for srv_record in common_srvs[:100]: + hostname = f"{srv_record}.example.com" + assert service_record(hostname) == True +>>>>>>> Stashed changes From 098f0c12ed2807d6612197e478346410bfbc4ceb Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 12 Aug 2024 13:13:08 -0400 Subject: [PATCH 027/124] fix conflict --- bbot/test/test_step_1/test_dns.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py index 58e877b9f..b10fcc544 100644 --- a/bbot/test/test_step_1/test_dns.py +++ b/bbot/test/test_step_1/test_dns.py @@ -438,7 +438,6 @@ async def handle_event(self, event): ) -<<<<<<< Updated upstream @pytest.mark.asyncio async def test_dns_graph_structure(bbot_scanner): scan = bbot_scanner("https://evilcorp.com", config={"dns": {"search_distance": 1, "minimal": False}}) @@ -465,7 +464,8 @@ async def test_dns_graph_structure(bbot_scanner): assert str(events_by_data["www.evilcorp.com"].module) == "CNAME" assert events_by_data["evilcorp.com"].parent.data == "https://evilcorp.com/" assert str(events_by_data["evilcorp.com"].module) == "host" -======= + + def test_dns_helpers(): assert service_record("") == False assert service_record("localhost") == False @@ -477,4 +477,3 @@ def test_dns_helpers(): for srv_record in common_srvs[:100]: hostname = f"{srv_record}.example.com" assert service_record(hostname) == True ->>>>>>> Stashed changes From 2559686beabbb6c9faf45413d4dd473714a285cc Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 12 Aug 2024 14:21:02 -0400 Subject: [PATCH 028/124] reimplement event confidence --- bbot/core/event/base.py | 35 ++++++++++++++++++++++------ bbot/test/bbot_fixtures.py | 12 +++++----- bbot/test/test_step_1/test_events.py | 35 ++++++++++++++++++++++++++++ 3 files changed, 69 insertions(+), 13 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 9a5a9b869..d19f2e7d1 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -127,7 +127,7 @@ def __init__( scan=None, scans=None, tags=None, - confidence=5, + confidence=100, timestamp=None, _dummy=False, _internal=None, @@ -146,7 +146,7 @@ def __init__( scan (Scan, optional): BBOT Scan object. Required unless _dummy is True. Defaults to None. scans (list of Scan, optional): BBOT Scan objects, used primarily when unserializing an Event from the database. Defaults to None. tags (list of str, optional): Descriptive tags for the event. Defaults to None. - confidence (int, optional): Confidence level for the event, on a scale of 1-10. Defaults to 5. + confidence (int, optional): Confidence level for the event, on a scale of 1-100. Defaults to 100. timestamp (datetime, optional): Time of event discovery. Defaults to current UTC time. _dummy (bool, optional): If True, disables certain data validations. Defaults to False. _internal (Any, optional): If specified, makes the event internal. Defaults to None. 
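
A short usage sketch of the new default (illustrative only, not part of this patch), assuming a bbot Scanner instance named scan as in the project's test suite:

    # sketch: events now default to full confidence unless one is supplied
    event = scan.make_event("evilcorp.com", "DNS_NAME", dummy=True)
    assert event.confidence == 100  # new default (previously 5)
    event = scan.make_event("evilcorp.com", "DNS_NAME", confidence=90, dummy=True)
    assert event.confidence == 90
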
@@ -237,6 +237,27 @@ def __init__( def data(self): return self._data + @property + def confidence(self): + return self._confidence + + @confidence.setter + def confidence(self, confidence): + self._confidence = min(100, max(1, int(confidence))) + + @property + def cumulative_confidence(self): + """ + Considers the confidence of parent events. This is useful for filtering out speculative/unreliable events. + + E.g. an event with a confidence of 50 whose parent is also 50 would have a cumulative confidence of 25. + + A confidence of 100 will reset the cumulative confidence to 100. + """ + if self._confidence == 100 or self.parent is None or self.parent is self: + return self._confidence + return int(self._confidence * self.parent.cumulative_confidence / 100) + @property def resolved_hosts(self): if is_ip(self.host): @@ -359,7 +380,7 @@ def discovery_path(self): This event's full discovery context, including those of all its parents """ parent_path = [] - if self.parent is not None and self != self.parent: + if self.parent is not None and self.parent is not self: parent_path = self.parent.discovery_path return parent_path + [[self.id, self.discovery_context]] @@ -463,7 +484,7 @@ def scope_distance(self, scope_distance): self._scope_distance = new_scope_distance # apply recursively to parent events parent_scope_distance = getattr(self.parent, "scope_distance", None) - if parent_scope_distance is not None and self != self.parent: + if parent_scope_distance is not None and self.parent is not self: self.parent.scope_distance = scope_distance + 1 @property @@ -1464,7 +1485,7 @@ def make_event( scan=None, scans=None, tags=None, - confidence=5, + confidence=100, dummy=False, internal=None, ): @@ -1484,7 +1505,7 @@ def make_event( scan (Scan, optional): BBOT Scan object associated with the event. scans (List[Scan], optional): Multiple BBOT Scan objects, primarily used for unserialization. tags (Union[str, List[str]], optional): Descriptive tags for the event, as a list or a single string. - confidence (int, optional): Confidence level for the event, on a scale of 1-10. Defaults to 5. + confidence (int, optional): Confidence level for the event, on a scale of 1-100. Defaults to 100. dummy (bool, optional): Disables data validations if set to True. Defaults to False. internal (Any, optional): Makes the event internal if set to True. Defaults to None. 
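
To make the compounding rule above concrete, here is a minimal standalone sketch (illustrative only, not part of this patch) mirroring the arithmetic of the confidence setter and cumulative_confidence, evaluated root-first over a chain of per-event confidences:

    # sketch: clamping and compounding, as introduced in the hunk above
    def clamp(confidence):
        return min(100, max(1, int(confidence)))

    def cumulative(chain):
        total = 100
        for confidence in map(clamp, chain):
            # a confidence of 100 resets the chain; lower values compound
            total = 100 if confidence == 100 else int(confidence * total / 100)
        return total

    assert cumulative([90, 50]) == 45        # 50% of 90
    assert cumulative([90, 50, 50]) == 22    # int(50 * 45 / 100)
    assert cumulative([90, 50, 100]) == 100  # 100 resets

The first two values match assertions added to test_event_confidence later in this patch; the third matches the behavior after the follow-up "fix tests" commit.
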
@@ -1613,7 +1634,7 @@ def event_from_json(j, siem_friendly=False): "event_type": event_type, "scans": j.get("scans", []), "tags": j.get("tags", []), - "confidence": j.get("confidence", 5), + "confidence": j.get("confidence", 100), "context": j.get("discovery_context", None), "dummy": True, } diff --git a/bbot/test/bbot_fixtures.py b/bbot/test/bbot_fixtures.py index 1c9631fac..86110a6cb 100644 --- a/bbot/test/bbot_fixtures.py +++ b/bbot/test/bbot_fixtures.py @@ -208,9 +208,9 @@ class bbot_events: return bbot_events -@pytest.fixture(scope="session", autouse=True) -def install_all_python_deps(): - deps_pip = set() - for module in DEFAULT_PRESET.module_loader.preloaded().values(): - deps_pip.update(set(module.get("deps", {}).get("pip", []))) - subprocess.run([sys.executable, "-m", "pip", "install"] + list(deps_pip)) +# @pytest.fixture(scope="session", autouse=True) +# def install_all_python_deps(): +# deps_pip = set() +# for module in DEFAULT_PRESET.module_loader.preloaded().values(): +# deps_pip.update(set(module.get("deps", {}).get("pip", []))) +# subprocess.run([sys.executable, "-m", "pip", "install"] + list(deps_pip)) diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index c319559d4..8ac8eb440 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -771,3 +771,38 @@ async def test_event_web_spider_distance(bbot_scanner): assert url_event_5.web_spider_distance == 1 assert "spider-danger" in url_event_5.tags assert not "spider-max" in url_event_5.tags + + +def test_event_confidence(): + scan = Scanner() + # default 100 + event1 = scan.make_event("evilcorp.com", "DNS_NAME", dummy=True) + assert event1.confidence == 100 + assert event1.cumulative_confidence == 100 + # custom confidence + event2 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=90, dummy=True) + assert event2.confidence == 90 + assert event2.cumulative_confidence == 90 + # max 100 + event3 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=999, dummy=True) + assert event3.confidence == 100 + assert event3.cumulative_confidence == 100 + # min 1 + event4 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=0, dummy=True) + assert event4.confidence == 1 + assert event4.cumulative_confidence == 1 + # first event in chain + event5 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=90, parent=scan.root_event) + assert event5.confidence == 90 + assert event5.cumulative_confidence == 90 + # compounding confidence + event6 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=50, parent=event5) + assert event6.confidence == 50 + assert event6.cumulative_confidence == 45 + event7 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=50, parent=event6) + assert event7.confidence == 50 + assert event7.cumulative_confidence == 22 + # 100 confidence resets + event8 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=100, parent=event7) + assert event8.confidence == 100 + assert event8.cumulative_confidence == 22 From ba359d0f21af7bee6a3b1868a097954a9ea7c675 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 12 Aug 2024 14:21:52 -0400 Subject: [PATCH 029/124] update docs --- docs/dev/helpers/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/dev/helpers/index.md b/docs/dev/helpers/index.md index d34a10d4a..60d64f793 100644 --- a/docs/dev/helpers/index.md +++ b/docs/dev/helpers/index.md @@ -18,7 +18,7 @@ class MyModule(BaseModule): self.hugesuccess(str(ip)) # Execute shell command - 
completed_process = self.run_process("ls", "-l") + completed_process = await self.run_process("ls", "-l") self.hugesuccess(completed_process.stdout) # Split a DNS name into subdomain / domain From 898c47a42496afbd613f3e3aa709c21aed831349 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 12 Aug 2024 14:27:24 -0400 Subject: [PATCH 030/124] flake --- bbot/test/bbot_fixtures.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bbot/test/bbot_fixtures.py b/bbot/test/bbot_fixtures.py index 86110a6cb..1c9631fac 100644 --- a/bbot/test/bbot_fixtures.py +++ b/bbot/test/bbot_fixtures.py @@ -208,9 +208,9 @@ class bbot_events: return bbot_events -# @pytest.fixture(scope="session", autouse=True) -# def install_all_python_deps(): -# deps_pip = set() -# for module in DEFAULT_PRESET.module_loader.preloaded().values(): -# deps_pip.update(set(module.get("deps", {}).get("pip", []))) -# subprocess.run([sys.executable, "-m", "pip", "install"] + list(deps_pip)) +@pytest.fixture(scope="session", autouse=True) +def install_all_python_deps(): + deps_pip = set() + for module in DEFAULT_PRESET.module_loader.preloaded().values(): + deps_pip.update(set(module.get("deps", {}).get("pip", []))) + subprocess.run([sys.executable, "-m", "pip", "install"] + list(deps_pip)) From 64b60728c9f8c234ec6104cc3093689db83899ae Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 12 Aug 2024 14:49:16 -0400 Subject: [PATCH 031/124] fix tests --- bbot/test/test_step_1/test_events.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 8ac8eb440..fede1977e 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -805,4 +805,4 @@ def test_event_confidence(): # 100 confidence resets event8 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=100, parent=event7) assert event8.confidence == 100 - assert event8.cumulative_confidence == 22 + assert event8.cumulative_confidence == 100 From 70eba7391234dc01faf9e295a83aae338a5a9476 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 12 Aug 2024 16:45:15 -0400 Subject: [PATCH 032/124] add path attribute to findings/vulns, accept blank host --- bbot/core/event/base.py | 33 +++++++++++++++++++++++----- bbot/test/test_step_1/test_events.py | 28 +++++++++++++++++++++++ 2 files changed, 56 insertions(+), 5 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 9a5a9b869..8a39e64c4 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -314,6 +314,15 @@ def host_original(self): return self.host return self._host_original + @property + def closest_host(self): + """ + Walk up the chain of parents events until we hit the first one with a host + """ + if self.host is not None or self.parent is None or self.parent is self: + return self.host + return self.parent.closest_host + @property def port(self): self.host @@ -572,7 +581,7 @@ def get_parents(self, omit=False, include_self=False): return parents def _host(self): - return "" + return None def _sanitize_data(self, data): """ @@ -923,6 +932,18 @@ def _host(self): return make_ip_type(parsed.hostname) +class ClosestHostEvent(DictHostEvent): + # if a host isn't specified, this event type uses the host from the closest parent + # inherited by FINDING and VULNERABILITY + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + if "host" not in self.data: + closest_host = self.closest_host + if closest_host is None: + raise 
ValueError("No host was found in event parents. Host must be specified!") + self.data["host"] = str(closest_host) + + class DictPathEvent(DictEvent): _path_keywords = ["path", "filename"] @@ -1300,7 +1321,7 @@ def redirect_location(self): return location -class VULNERABILITY(DictHostEvent): +class VULNERABILITY(ClosestHostEvent): _always_emit = True _quick_emit = True severity_colors = { @@ -1316,10 +1337,11 @@ def sanitize_data(self, data): return data class _data_validator(BaseModel): - host: str + host: Optional[str] = None severity: str description: str url: Optional[str] = None + path: Optional[str] = None _validate_url = field_validator("url")(validators.validate_url) _validate_host = field_validator("host")(validators.validate_host) _validate_severity = field_validator("severity")(validators.validate_severity) @@ -1328,14 +1350,15 @@ def _pretty_string(self): return f'[{self.data["severity"]}] {self.data["description"]}' -class FINDING(DictHostEvent): +class FINDING(ClosestHostEvent): _always_emit = True _quick_emit = True class _data_validator(BaseModel): - host: str + host: Optional[str] = None description: str url: Optional[str] = None + path: Optional[str] = None _validate_url = field_validator("url")(validators.validate_url) _validate_host = field_validator("host")(validators.validate_host) diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index c319559d4..83f2bf78b 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -771,3 +771,31 @@ async def test_event_web_spider_distance(bbot_scanner): assert url_event_5.web_spider_distance == 1 assert "spider-danger" in url_event_5.tags assert not "spider-max" in url_event_5.tags + + +def test_event_closest_host(): + scan = Scanner() + event1 = scan.make_event("evilcorp.com", "DNS_NAME", parent=scan.root_event) + assert event1.host == "evilcorp.com" + assert event1.closest_host == "evilcorp.com" + event2 = scan.make_event("wat", "ASDF", parent=event1) + assert event2.host == None + assert event2.closest_host == "evilcorp.com" + finding = scan.make_event({"path": "/tmp/asdf.txt", "description": "test"}, "FINDING", parent=event2) + assert finding.data["host"] == "evilcorp.com" + assert finding.host == "evilcorp.com" + vuln = scan.make_event( + {"path": "/tmp/asdf.txt", "description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event2 + ) + assert vuln.data["host"] == "evilcorp.com" + assert vuln.host == "evilcorp.com" + + # no host + event3 = scan.make_event("wat", "ASDF", parent=scan.root_event) + assert event3.host == None + with pytest.raises(ValueError): + finding = scan.make_event({"path": "/tmp/asdf.txt", "description": "test"}, "FINDING", parent=event3) + with pytest.raises(ValueError): + vuln = scan.make_event( + {"path": "/tmp/asdf.txt", "description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event3 + ) From ad55c2eb0919ba9e3b44ac07281d97954edfb930 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 12 Aug 2024 16:46:22 -0400 Subject: [PATCH 033/124] comment tests --- bbot/test/test_step_1/test_events.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 83f2bf78b..481feb744 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -775,22 +775,26 @@ async def test_event_web_spider_distance(bbot_scanner): def test_event_closest_host(): scan = Scanner() + # first event has a host event1 = 
scan.make_event("evilcorp.com", "DNS_NAME", parent=scan.root_event) assert event1.host == "evilcorp.com" assert event1.closest_host == "evilcorp.com" + # second event has no host event2 = scan.make_event("wat", "ASDF", parent=event1) assert event2.host == None assert event2.closest_host == "evilcorp.com" + # finding automatically uses the host from the first event finding = scan.make_event({"path": "/tmp/asdf.txt", "description": "test"}, "FINDING", parent=event2) assert finding.data["host"] == "evilcorp.com" assert finding.host == "evilcorp.com" + # same with vuln vuln = scan.make_event( {"path": "/tmp/asdf.txt", "description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event2 ) assert vuln.data["host"] == "evilcorp.com" assert vuln.host == "evilcorp.com" - # no host + # no host == not allowed event3 = scan.make_event("wat", "ASDF", parent=scan.root_event) assert event3.host == None with pytest.raises(ValueError): From b40cef2d3ea07e968c0b054bceadfa0651437d50 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 12 Aug 2024 21:03:23 -0400 Subject: [PATCH 034/124] fix description --- bbot/modules/dnscommonsrv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index 5fb279b91..4d804852b 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -6,7 +6,7 @@ class dnscommonsrv(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "", "created_date": "2022-05-15", "author": "@TheTechromancer"} + meta = {"description": "Check for common SRV records", "created_date": "2022-05-15", "author": "@TheTechromancer"} dedup_strategy = "lowest_parent" options = {"max_depth": 2} From 2df980aa7a78a69674f191878ab316c522d9728c Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 14 Aug 2024 12:30:06 +0100 Subject: [PATCH 035/124] Tagged modules with `code-enum` --- bbot/modules/docker_pull.py | 2 +- bbot/modules/git_clone.py | 2 +- bbot/modules/github_workflows.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/docker_pull.py b/bbot/modules/docker_pull.py index 987651fcd..0d1f63c29 100644 --- a/bbot/modules/docker_pull.py +++ b/bbot/modules/docker_pull.py @@ -8,7 +8,7 @@ class docker_pull(BaseModule): watched_events = ["CODE_REPOSITORY"] produced_events = ["FILESYSTEM"] - flags = ["passive", "safe", "slow"] + flags = ["passive", "safe", "slow", "code-enum"] meta = { "description": "Download images from a docker repository", "created_date": "2024-03-24", diff --git a/bbot/modules/git_clone.py b/bbot/modules/git_clone.py index 6cda79f9d..dbf24e91e 100644 --- a/bbot/modules/git_clone.py +++ b/bbot/modules/git_clone.py @@ -6,7 +6,7 @@ class git_clone(github): watched_events = ["CODE_REPOSITORY"] produced_events = ["FILESYSTEM"] - flags = ["passive", "safe", "slow"] + flags = ["passive", "safe", "slow", "code-enum"] meta = { "description": "Clone code github repositories", "created_date": "2024-03-08", diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py index 15767ca63..d51da905e 100644 --- a/bbot/modules/github_workflows.py +++ b/bbot/modules/github_workflows.py @@ -7,7 +7,7 @@ class github_workflows(github): watched_events = ["CODE_REPOSITORY"] produced_events = ["FILESYSTEM"] - flags = ["passive", "safe"] + flags = ["passive", "safe", "code-enum"] meta = { "description": "Download a github repositories workflow logs", "created_date": 
"2024-04-29", From f6c533ab7c696c575d357b7aeb8b885086c6a615 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 14 Aug 2024 17:37:31 -0400 Subject: [PATCH 036/124] better engine debugging --- bbot/core/engine.py | 70 ++++++++++++++++++--------------- bbot/core/helpers/dns/dns.py | 3 +- bbot/core/helpers/dns/engine.py | 4 +- bbot/core/helpers/web/engine.py | 4 +- bbot/core/helpers/web/web.py | 6 ++- bbot/defaults.yml | 5 +++ bbot/test/test.conf | 2 + 7 files changed, 56 insertions(+), 38 deletions(-) diff --git a/bbot/core/engine.py b/bbot/core/engine.py index 70652d456..06498d259 100644 --- a/bbot/core/engine.py +++ b/bbot/core/engine.py @@ -40,9 +40,10 @@ class EngineBase: ERROR_CLASS = BBOTEngineError - def __init__(self): + def __init__(self, debug=False): self._shutdown_status = False self.log = logging.getLogger(f"bbot.core.{self.__class__.__name__.lower()}") + self._debug = debug def pickle(self, obj): try: @@ -78,6 +79,10 @@ async def _infinite_retry(self, callback, *args, **kwargs): if max_retries is not None and retries > max_retries: raise TimeoutError(f"Timed out after {max_retries*interval:,} seconds {context}") + def debug(self, *args, **kwargs): + if self._debug: + self.log.debug(*args, **kwargs) + class EngineClient(EngineBase): """ @@ -114,9 +119,9 @@ class EngineClient(EngineBase): SERVER_CLASS = None - def __init__(self, **kwargs): - super().__init__() + def __init__(self, debug=False, **kwargs): self.name = f"EngineClient {self.__class__.__name__}" + super().__init__(debug=debug) self.process = None if self.SERVER_CLASS is None: raise ValueError(f"Must set EngineClient SERVER_CLASS, {self.SERVER_CLASS}") @@ -141,7 +146,7 @@ def check_error(self, message): async def run_and_return(self, command, *args, **kwargs): fn_str = f"{command}({args}, {kwargs})" - self.log.debug(f"{self.name}: executing run-and-return {fn_str}") + self.debug(f"{self.name}: executing run-and-return {fn_str}") if self._shutdown_status and not command == "_shutdown": self.log.verbose(f"{self.name} has been shut down and is not accepting new tasks") return @@ -150,7 +155,7 @@ async def run_and_return(self, command, *args, **kwargs): message = self.make_message(command, args=args, kwargs=kwargs) if message is error_sentinel: return - await self._infinite_retry(socket.send, message) + await socket.send(message) binary = await self._infinite_retry(socket.recv, _context=f"waiting for return value from {fn_str}") except BaseException: try: @@ -161,7 +166,7 @@ async def run_and_return(self, command, *args, **kwargs): raise # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}") message = self.unpickle(binary) - self.log.debug(f"{self.name}: {fn_str} got return value: {message}") + self.debug(f"{self.name}: {fn_str} got return value: {message}") # error handling if self.check_error(message): return @@ -169,7 +174,7 @@ async def run_and_return(self, command, *args, **kwargs): async def run_and_yield(self, command, *args, **kwargs): fn_str = f"{command}({args}, {kwargs})" - self.log.debug(f"{self.name}: executing run-and-yield {fn_str}") + self.debug(f"{self.name}: executing run-and-yield {fn_str}") if self._shutdown_status: self.log.verbose("Engine has been shut down and is not accepting new tasks") return @@ -188,18 +193,18 @@ async def run_and_yield(self, command, *args, **kwargs): ) # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}") message = self.unpickle(binary) - self.log.debug(f"{self.name} {command} got iteration: {message}") + self.debug(f"{self.name}: 
{fn_str} got iteration: {message}") # error handling if self.check_error(message) or self.check_stop(message): break yield message except (StopAsyncIteration, GeneratorExit) as e: exc_name = e.__class__.__name__ - self.log.debug(f"{self.name}.{command} got {exc_name}") + self.debug(f"{self.name}.{command} got {exc_name}") try: await self.send_cancel_message(socket, fn_str) except Exception: - self.log.debug(f"{self.name}.{command} failed to send cancel message after {exc_name}") + self.debug(f"{self.name}.{command} failed to send cancel message after {exc_name}") self.log.trace(traceback.format_exc()) break @@ -266,6 +271,7 @@ def start_server(self): # this allows us to more easily mock http, etc. if os.environ.get("BBOT_TESTING", "") == "True": kwargs["_loop"] = get_event_loop() + kwargs["debug"] = self._debug self.process = CORE.create_process( target=self.server_process, args=( @@ -305,7 +311,7 @@ async def new_socket(self): if self._server_process is None: self._server_process = self.start_server() while not self.socket_path.exists(): - self.log.debug(f"{self.name}: waiting for server process to start...") + self.debug(f"{self.name}: waiting for server process to start...") await asyncio.sleep(0.1) socket = self.context.socket(zmq.DEALER) socket.setsockopt(zmq.LINGER, 0) @@ -366,9 +372,9 @@ class EngineServer(EngineBase): CMDS = {} - def __init__(self, socket_path): - super().__init__() + def __init__(self, socket_path, debug=False): self.name = f"EngineServer {self.__class__.__name__}" + super().__init__(debug=debug) self.socket_path = socket_path self.client_id_var = contextvars.ContextVar("client_id", default=None) # task <--> client id mapping @@ -397,21 +403,21 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs): fn_str = f"{command_fn.__name__}({args}, {kwargs})" with self.client_id_context(client_id): try: - self.log.debug(f"{self.name} run-and-return {fn_str}") + self.debug(f"{self.name}: run-and-return {fn_str}") result = error_sentinel try: result = await command_fn(*args, **kwargs) except BaseException as e: if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): error = f"Error in {self.name}.{fn_str}: {e}" - self.log.debug(error) + self.debug(error) trace = traceback.format_exc() - self.log.debug(trace) + self.debug(trace) result = {"_e": (error, trace)} finally: self.tasks.pop(client_id, None) if result is not error_sentinel: - self.log.debug(f"{self.name}: Sending response to {fn_str}: {result}") + self.debug(f"{self.name}: Sending response to {fn_str}: {result}") await self.send_socket_multipart(client_id, result) except BaseException as e: self.log.critical( @@ -419,27 +425,27 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs): ) self.log.critical(traceback.format_exc()) finally: - self.log.debug(f"{self.name} finished run-and-return {command_fn.__name__}({args}, {kwargs})") + self.debug(f"{self.name} finished run-and-return {command_fn.__name__}({args}, {kwargs})") async def run_and_yield(self, client_id, command_fn, *args, **kwargs): fn_str = f"{command_fn.__name__}({args}, {kwargs})" with self.client_id_context(client_id): try: - self.log.debug(f"{self.name} run-and-yield {fn_str}") + self.debug(f"{self.name}: run-and-yield {fn_str}") try: async for _ in command_fn(*args, **kwargs): - self.log.debug(f"{self.name}: sending iteration for {command_fn.__name__}(): {_}") + self.debug(f"{self.name}: sending iteration for {command_fn.__name__}(): {_}") await self.send_socket_multipart(client_id, _) except 
BaseException as e: if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): error = f"Error in {self.name}.{fn_str}: {e}" trace = traceback.format_exc() - self.log.debug(error) - self.log.debug(trace) + self.debug(error) + self.debug(trace) result = {"_e": (error, trace)} await self.send_socket_multipart(client_id, result) finally: - self.log.debug(f"{self.name} reached end of run-and-yield iteration for {command_fn.__name__}()") + self.debug(f"{self.name} reached end of run-and-yield iteration for {command_fn.__name__}()") # _s == special signal that means StopIteration await self.send_socket_multipart(client_id, {"_s": None}) self.tasks.pop(client_id, None) @@ -449,7 +455,7 @@ async def run_and_yield(self, client_id, command_fn, *args, **kwargs): ) self.log.critical(traceback.format_exc()) finally: - self.log.debug(f"{self.name} finished run-and-yield {command_fn.__name__}()") + self.debug(f"{self.name} finished run-and-yield {command_fn.__name__}()") async def send_socket_multipart(self, client_id, message): try: @@ -464,7 +470,7 @@ def check_error(self, message): return True async def worker(self): - self.log.debug(f"{self.name}: starting worker") + self.debug(f"{self.name}: starting worker") try: while 1: client_id, binary = await self.socket.recv_multipart() @@ -480,14 +486,14 @@ async def worker(self): # -1 == cancel task if cmd == -1: - self.log.debug(f"{self.name} got cancel signal") + self.debug(f"{self.name} got cancel signal") await self.send_socket_multipart(client_id, {"m": "CANCEL_OK"}) await self.cancel_task(client_id) continue # -99 == shutdown task if cmd == -99: - self.log.debug(f"{self.name} got shutdown signal") + self.debug(f"{self.name} got shutdown signal") await self.send_socket_multipart(client_id, {"m": "SHUTDOWN_OK"}) await self._shutdown() return @@ -525,7 +531,7 @@ async def worker(self): self.log.error(f"{self.name}: error in EngineServer worker: {e}") self.log.trace(traceback.format_exc()) finally: - self.log.debug(f"{self.name}: finished worker()") + self.debug(f"{self.name}: finished worker()") async def _shutdown(self): if not self._shutdown_status: @@ -540,7 +546,7 @@ async def _shutdown(self): self.context.term() except Exception: self.log.trace(traceback.format_exc()) - self.log.debug(f"{self.name}: finished shutting down") + self.log.verbose(f"{self.name}: finished shutting down") def new_child_task(self, client_id, coro): task = asyncio.create_task(coro) @@ -573,11 +579,11 @@ async def cancel_task(self, client_id): if parent_task is None: return parent_task, _cmd, _args, _kwargs = parent_task - self.log.debug(f"{self.name}: Cancelling client id {client_id} (task: {parent_task})") + self.debug(f"{self.name}: Cancelling client id {client_id} (task: {parent_task})") parent_task.cancel() child_tasks = self.child_tasks.pop(client_id, set()) if child_tasks: - self.log.debug(f"{self.name}: Cancelling {len(child_tasks):,} child tasks for client id {client_id}") + self.debug(f"{self.name}: Cancelling {len(child_tasks):,} child tasks for client id {client_id}") for child_task in child_tasks: child_task.cancel() @@ -588,7 +594,7 @@ async def _cancel_task(self, task): try: await asyncio.wait_for(task, timeout=10) except (TimeoutError, asyncio.exceptions.TimeoutError): - self.log.debug(f"{self.name}: Timeout cancelling task") + self.log.trace(f"{self.name}: Timeout cancelling task: {task}") return except (KeyboardInterrupt, asyncio.CancelledError): return diff --git a/bbot/core/helpers/dns/dns.py b/bbot/core/helpers/dns/dns.py index 
2f77ce081..07f562132 100644 --- a/bbot/core/helpers/dns/dns.py +++ b/bbot/core/helpers/dns/dns.py @@ -56,7 +56,8 @@ def __init__(self, parent_helper): self.parent_helper = parent_helper self.config = self.parent_helper.config self.dns_config = self.config.get("dns", {}) - super().__init__(server_kwargs={"config": self.config}) + engine_debug = self.config.get("engine", {}).get("debug", False) + super().__init__(server_kwargs={"config": self.config}, debug=engine_debug) # resolver self.timeout = self.dns_config.get("timeout", 5) diff --git a/bbot/core/helpers/dns/engine.py b/bbot/core/helpers/dns/engine.py index 6840d5506..d24c1f766 100644 --- a/bbot/core/helpers/dns/engine.py +++ b/bbot/core/helpers/dns/engine.py @@ -37,8 +37,8 @@ class DNSEngine(EngineServer): 99: "_mock_dns", } - def __init__(self, socket_path, config={}): - super().__init__(socket_path) + def __init__(self, socket_path, config={}, debug=False): + super().__init__(socket_path, debug=debug) self.config = config self.dns_config = self.config.get("dns", {}) diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index 30e037e6c..8f7984e2e 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -27,8 +27,8 @@ class HTTPEngine(EngineServer): "max_redirects", ) - def __init__(self, socket_path, target, config={}): - super().__init__(socket_path) + def __init__(self, socket_path, target, config={}, debug=False): + super().__init__(socket_path, debug=debug) self.target = target self.config = config self.web_config = self.config.get("web", {}) diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py index 1e8ca3c61..c061a3d62 100644 --- a/bbot/core/helpers/web/web.py +++ b/bbot/core/helpers/web/web.py @@ -57,7 +57,11 @@ def __init__(self, parent_helper): self.web_spider_distance = self.web_config.get("spider_distance", 0) self.target = self.preset.target self.ssl_verify = self.config.get("ssl_verify", False) - super().__init__(server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.radix_only}) + engine_debug = self.config.get("engine", {}).get("debug", False) + super().__init__( + server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.radix_only}, + debug=engine_debug, + ) def AsyncClient(self, *args, **kwargs): from .client import BBOTAsyncClient diff --git a/bbot/defaults.yml b/bbot/defaults.yml index 62f178898..2ce8d4208 100644 --- a/bbot/defaults.yml +++ b/bbot/defaults.yml @@ -97,6 +97,11 @@ web: # Whether to verify SSL certificates ssl_verify: false +### ENGINE ### + +engine: + debug: false + # Tool dependencies deps: ffuf: diff --git a/bbot/test/test.conf b/bbot/test/test.conf index 8ae91bcf3..63914fe65 100644 --- a/bbot/test/test.conf +++ b/bbot/test/test.conf @@ -36,6 +36,8 @@ dns: - example.com - evilcorp.com - one +engine: + debug: true agent_url: ws://127.0.0.1:8765 agent_token: test speculate: false From fbf1a0f44659f4e54f0adc91450e8f8c2b166833 Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 15 Aug 2024 01:23:50 -0400 Subject: [PATCH 037/124] pin unstructured version --- bbot/modules/unstructured.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 4143ea2fd..25ae307bf 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -67,7 +67,7 @@ class unstructured(BaseModule): } deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"] - deps_pip = 
["unstructured[all-docs]"] + deps_pip = ["git+https://github.com/Unstructured-IO/unstructured@9b778e270dd8547476370a9417520679cd46c802#egg=unstructured[all-docs]"] async def setup(self): self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) From 5ce846fd6777e9ba492c87f7e713fa43f11c02ae Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 15 Aug 2024 01:37:29 -0400 Subject: [PATCH 038/124] blacked --- bbot/modules/unstructured.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 25ae307bf..7277f40d1 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -67,7 +67,9 @@ class unstructured(BaseModule): } deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"] - deps_pip = ["git+https://github.com/Unstructured-IO/unstructured@9b778e270dd8547476370a9417520679cd46c802#egg=unstructured[all-docs]"] + deps_pip = [ + "git+https://github.com/Unstructured-IO/unstructured@9b778e270dd8547476370a9417520679cd46c802#egg=unstructured[all-docs]" + ] async def setup(self): self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) From 8c9440ab75b944f74d00e8737635f40fb8925e23 Mon Sep 17 00:00:00 2001 From: GitHub Date: Fri, 16 Aug 2024 00:22:13 +0000 Subject: [PATCH 039/124] Update nuclei --- bbot/modules/deadly/nuclei.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 9eeae9109..45d76411c 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -15,7 +15,7 @@ class nuclei(BaseModule): } options = { - "version": "3.2.0", + "version": "3.3.0", "tags": "", "templates": "", "severity": "", From 3a578298bc498a036938832f65204c489ac14878 Mon Sep 17 00:00:00 2001 From: GitHub Date: Fri, 16 Aug 2024 00:22:18 +0000 Subject: [PATCH 040/124] Update trufflehog --- bbot/modules/trufflehog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index 875aa4f2a..aacc23a77 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -13,7 +13,7 @@ class trufflehog(BaseModule): } options = { - "version": "3.81.7", + "version": "3.81.9", "only_verified": True, "concurrency": 8, "deleted_forks": False, From 9e9fa0b8c5becf0ec10067e43de0082efe84606c Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 15 Aug 2024 22:01:07 -0400 Subject: [PATCH 041/124] remove debugging message --- bbot/modules/templates/bucket.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bbot/modules/templates/bucket.py b/bbot/modules/templates/bucket.py index 3b7bde789..845ae0e9a 100644 --- a/bbot/modules/templates/bucket.py +++ b/bbot/modules/templates/bucket.py @@ -89,7 +89,6 @@ async def handle_storage_bucket(self, event): async def emit_storage_bucket(self, event_data, event_type, parent, tags, context): event_data["url"] = self.clean_bucket_url(event_data["url"]) - self.hugewarning(event_data) await self.emit_event( event_data, event_type, From c39d56110f37a1634804d812cd3c41f315fc589f Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 16 Aug 2024 00:44:05 -0400 Subject: [PATCH 042/124] new commit hash --- bbot/modules/unstructured.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 7277f40d1..31d0a96ee 100644 --- a/bbot/modules/unstructured.py +++ 
b/bbot/modules/unstructured.py @@ -68,7 +68,7 @@ class unstructured(BaseModule): deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"] deps_pip = [ - "git+https://github.com/Unstructured-IO/unstructured@9b778e270dd8547476370a9417520679cd46c802#egg=unstructured[all-docs]" + "git+https://github.com/Unstructured-IO/unstructured@d0211cc41faa3988b0cfdefa3e0a8f80adbf013b#egg=unstructured[all-docs]" ] async def setup(self): From acb77dec7384567bb783438c659b0aee70632575 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 16 Aug 2024 09:48:31 -0400 Subject: [PATCH 043/124] support usernames as targets --- bbot/scanner/target.py | 4 ++-- bbot/test/test_step_1/test_target.py | 17 +++++++++++++---- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index 8b88882ce..aff8b3227 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -277,8 +277,8 @@ def __init__(self, *targets, strict_scope=False, scan=None, acl_mode=False): self.strict_scope = strict_scope self.acl_mode = acl_mode self.special_event_types = { - "ORG_STUB": re.compile(r"^ORG:(.*)", re.IGNORECASE), - "ASN": re.compile(r"^ASN:(.*)", re.IGNORECASE), + "ORG_STUB": re.compile(r"^(?:ORG|ORG_STUB):(.*)", re.IGNORECASE), + "USERNAME": re.compile(r"^(?:USER|USERNAME):(.*)", re.IGNORECASE), } self._events = set() self._radix = RadixTarget() diff --git a/bbot/test/test_step_1/test_target.py b/bbot/test/test_step_1/test_target.py index 23175607f..efdf089d3 100644 --- a/bbot/test/test_step_1/test_target.py +++ b/bbot/test/test_step_1/test_target.py @@ -178,10 +178,19 @@ async def test_target(bbot_scanner): assert list(bbottarget.whitelist) == ["evilcorp.net"] assert list(bbottarget.blacklist) == ["evilcorp.org"] - scan = bbot_scanner("ORG:evilcorp") - events = [e async for e in scan.async_start()] - assert len(events) == 2 - assert set([e.type for e in events]) == {"SCAN", "ORG_STUB"} + # test org stub as target + for org_target in ("ORG:evilcorp", "ORG_STUB:evilcorp"): + scan = bbot_scanner(org_target) + events = [e async for e in scan.async_start()] + assert len(events) == 2 + assert set([e.type for e in events]) == {"SCAN", "ORG_STUB"} + + # test username as target + for user_target in ("USER:vancerefrigeration", "USERNAME:vancerefrigeration"): + scan = bbot_scanner(user_target) + events = [e async for e in scan.async_start()] + assert len(events) == 2 + assert set([e.type for e in events]) == {"SCAN", "USERNAME"} # verify hash values bbottarget = BBOTTarget( From 5c985aa2065a52af220bff811c992b508449f8c3 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 16 Aug 2024 15:25:10 +0100 Subject: [PATCH 044/124] Only add url to `FINDING` event if it exists in the source_event --- bbot/modules/internal/excavate.py | 48 +++++++++++-------------------- 1 file changed, 16 insertions(+), 32 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index a19daf940..37f5920dc 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -181,12 +181,9 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte """ for identifier, results in yara_results.items(): for result in results: - if isinstance(event.data, dict): - url = event.data.get("url", "") - else: - url = "" - - event_data = {"host": str(event.host), "url": url} + event_data = {"host": str(event.host)} + if "url" in event.data: + event_data["url"] = event.data["url"] event_data["description"] = 
f"{discovery_context} {yara_rule_settings.description}" if yara_rule_settings.emit_match: event_data["description"] += f" [{result}]" @@ -273,12 +270,9 @@ def __init__(self, excavate): async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier, results in yara_results.items(): for result in results: - if isinstance(event.data, dict): - url = event.data.get("url", "") - else: - url = "" - - event_data = {"host": str(event.host), "url": url} + event_data = {"host": str(event.host)} + if "url" in event.data: + event_data["url"] = event.data["url"] description_string = ( f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else "" ) @@ -592,16 +586,11 @@ def __init__(self, excavate): async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier in yara_results.keys(): for findings in yara_results[identifier]: - if isinstance(event.data, dict): - url = event.data.get("url", "") - else: - url = "" - - event_data = { - "host": str(event.host), - "url": url, - "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})", - } + event_data = {"host": str(event.host)} + if "url" in event.data: + event_data["url"] = event.data["url"] + event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" + await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") class SerializationExtractor(ExcavateRule): @@ -629,16 +618,11 @@ def __init__(self, excavate): async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier in yara_results.keys(): for findings in yara_results[identifier]: - if isinstance(event.data, dict): - url = event.data.get("url", "") - else: - url = "" - - event_data = { - "host": str(event.host), - "url": url, - "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})", - } + event_data = {"host": str(event.host)} + if "url" in event.data: + event_data["url"] = event.data["url"] + event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" + await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") class FunctionalityExtractor(ExcavateRule): From c48a5c627cf2d3a5c4f08f6747582f5ffb961245 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 16 Aug 2024 15:43:56 +0100 Subject: [PATCH 045/124] Add the parents path to the `FINDING` / `VULNERABILITY` --- bbot/modules/internal/excavate.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 37f5920dc..2403fd060 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -184,6 +184,8 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if "url" in event.data: event_data["url"] = event.data["url"] + if "path" in event.parent.data: + event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description}" if yara_rule_settings.emit_match: event_data["description"] += f" [{result}]" @@ -273,6 +275,8 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if "url" in event.data: event_data["url"] = event.data["url"] + if "path" in event.parent.data: + event_data["path"] = 
event.parent.data["path"] description_string = ( f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else "" ) @@ -589,6 +593,8 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if "url" in event.data: event_data["url"] = event.data["url"] + if "path" in event.parent.data: + event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") @@ -621,6 +627,8 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if "url" in event.data: event_data["url"] = event.data["url"] + if "path" in event.parent.data: + event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") From 4abd67c8caefe2f822c8406e1985b1e4062d6cac Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 16 Aug 2024 16:24:01 +0100 Subject: [PATCH 046/124] Get the url from the event dict --- bbot/modules/internal/excavate.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 2403fd060..3f609d2b0 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -182,9 +182,9 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte for identifier, results in yara_results.items(): for result in results: event_data = {"host": str(event.host)} - if "url" in event.data: + if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if "path" in event.parent.data: + if event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description}" if yara_rule_settings.emit_match: @@ -273,9 +273,9 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte for identifier, results in yara_results.items(): for result in results: event_data = {"host": str(event.host)} - if "url" in event.data: + if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if "path" in event.parent.data: + if event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] description_string = ( f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else "" @@ -591,9 +591,9 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte for identifier in yara_results.keys(): for findings in yara_results[identifier]: event_data = {"host": str(event.host)} - if "url" in event.data: + if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if "path" in event.parent.data: + if event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" @@ -625,9 +625,9 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte for identifier in yara_results.keys(): for findings in yara_results[identifier]: event_data = {"host": str(event.host)} - if "url" in 
event.data: + if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if "path" in event.parent.data: + if event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" From b03df8037139eceaee2791cce7d2b08db781811f Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 16 Aug 2024 16:56:06 +0100 Subject: [PATCH 047/124] Check if parent data is a dictionary --- bbot/modules/internal/excavate.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 3f609d2b0..425a6166d 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -184,7 +184,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if event.parent.data.get("path"): + if isinstance(event.data, dict) and event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description}" if yara_rule_settings.emit_match: @@ -275,7 +275,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if event.parent.data.get("path"): + if isinstance(event.data, dict) and event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] description_string = ( f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else "" @@ -593,7 +593,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if event.parent.data.get("path"): + if isinstance(event.data, dict) and event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" @@ -627,7 +627,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if event.parent.data.get("path"): + if isinstance(event.data, dict) and event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" From 121462c7410cdd4872f4bba5e16c4997b3851321 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 16 Aug 2024 17:31:26 +0100 Subject: [PATCH 048/124] Improved tests and fixed parent event --- bbot/modules/internal/excavate.py | 8 +++--- .../module_tests/test_module_excavate.py | 27 +++++++++++++------ 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 425a6166d..6e45d5055 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -184,7 +184,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and 
event.data.get("url"): event_data["url"] = event.data["url"] - if isinstance(event.data, dict) and event.parent.data.get("path"): + if isinstance(event.parent.data, dict) and event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description}" if yara_rule_settings.emit_match: @@ -275,7 +275,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if isinstance(event.data, dict) and event.parent.data.get("path"): + if isinstance(event.parent.data, dict) and event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] description_string = ( f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else "" @@ -593,7 +593,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if isinstance(event.data, dict) and event.parent.data.get("path"): + if isinstance(event.parent.data, dict) and event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" @@ -627,7 +627,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if isinstance(event.data, dict) and event.parent.data.get("path"): + if isinstance(event.parent.data, dict) and event.parent.data.get("path"): event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 7938ccf80..820d5d33c 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -993,15 +993,26 @@ def check(self, module_test, events): assert ( raw_text_events[0].data == self.unstructured_response ), f"Text extracted from PDF is incorrect, got {raw_text_events[0].data}" - event_data = [e.data for e in events] - assert "example@blacklanternsecurity.notreal" in event_data + email_events = [e for e in events if e.type == "EMAIL_ADDRESS"] + assert 1 == len(email_events), "Failed to emmit EMAIL_ADDRESS event" + assert ( + email_events[0].data == "example@blacklanternsecurity.notreal" + ), f"Email extracted from unstructured text is incorrect, got {email_events[0].data}" + finding_events = [e for e in events if e.type == "FINDING"] + assert 2 == len(finding_events), "Failed to emmit FINDING events" + finding_event_data = [e.data for e in finding_events] assert ( "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c" - in event_data - ) + in finding_event_data + ), f"Failed to emmit JWT event got {finding_event_data}" assert ( 
"AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA==" - in event_data - ) - assert "https://www.test.notreal/about" in event_data - assert "/donot_detect.js" not in event_data + in finding_event_data + ), f"Failed to emmit serialized event got {finding_event_data}" + assert finding_events[0].data["path"] == str(file), "File path not included in finding event" + url_events = [e for e in events if e.type == "URL"] + assert 1 == len(url_events), "Failed to emmit URL event" + assert ( + url_events[0].data == "https://www.test.notreal/about" + ), f"URL extracted from unstructured text is incorrect, got {url_events[0].data}" + assert "/donot_detect.js" not in [e.data for e in events] From 48aae2e120d8e26403e5d3c84ad2b607c208b2fc Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 16 Aug 2024 18:05:02 +0100 Subject: [PATCH 049/124] Made changes to test. *crosses fingers --- .../module_tests/test_module_excavate.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 820d5d33c..47fd4c689 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -1000,15 +1000,12 @@ def check(self, module_test, events): ), f"Email extracted from unstructured text is incorrect, got {email_events[0].data}" finding_events = [e for e in events if e.type == "FINDING"] assert 2 == len(finding_events), "Failed to emmit FINDING events" - finding_event_data = [e.data for e in finding_events] - assert ( - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c" - in finding_event_data - ), f"Failed to emmit JWT event got {finding_event_data}" - assert ( - "AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA==" - in finding_event_data - ), f"Failed to emmit serialized event got {finding_event_data}" + assert any( + e.type == "FINDING" and "JWT" in e.data["description"] for e in finding_events + ), f"Failed to emmit JWT event got {finding_events}" + assert any( + e.type == "FINDING" and "DOTNET" in e.data["description"] for e in finding_events + ), f"Failed to emmit serialized event got {finding_events}" assert finding_events[0].data["path"] == str(file), "File path not included in finding event" url_events = [e for e in events if e.type == "URL"] assert 1 == len(url_events), "Failed to emmit URL event" From 6b4217b59255ddb8be46770c20f838bf04bc1bc0 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 16 Aug 2024 18:33:38 +0100 Subject: [PATCH 050/124] D'oh --- bbot/test/test_step_2/module_tests/test_module_excavate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py 
b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 47fd4c689..e20e12fb0 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -1007,8 +1007,8 @@ def check(self, module_test, events): e.type == "FINDING" and "DOTNET" in e.data["description"] for e in finding_events ), f"Failed to emmit serialized event got {finding_events}" assert finding_events[0].data["path"] == str(file), "File path not included in finding event" - url_events = [e for e in events if e.type == "URL"] - assert 1 == len(url_events), "Failed to emmit URL event" + url_events = [e for e in events if e.type == "URL_UNVERIFIED"] + assert 1 == len(url_events), "Failed to emmit URL_UNVERIFIED event" assert ( url_events[0].data == "https://www.test.notreal/about" ), f"URL extracted from unstructured text is incorrect, got {url_events[0].data}" From 4df4d4a7ea23bb66b03efb499caecf5786ebc7d7 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 16 Aug 2024 19:03:57 +0100 Subject: [PATCH 051/124] Correct tests for `URL_UNVERIFIED` events --- .../test_step_2/module_tests/test_module_excavate.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index e20e12fb0..49e9d75d4 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -1007,9 +1007,6 @@ def check(self, module_test, events): e.type == "FINDING" and "DOTNET" in e.data["description"] for e in finding_events ), f"Failed to emmit serialized event got {finding_events}" assert finding_events[0].data["path"] == str(file), "File path not included in finding event" - url_events = [e for e in events if e.type == "URL_UNVERIFIED"] - assert 1 == len(url_events), "Failed to emmit URL_UNVERIFIED event" - assert ( - url_events[0].data == "https://www.test.notreal/about" - ), f"URL extracted from unstructured text is incorrect, got {url_events[0].data}" - assert "/donot_detect.js" not in [e.data for e in events] + url_events = [e.data for e in events if e.type == "URL_UNVERIFIED"] + assert "https://www.test.notreal/about" in url_events, f"URL extracted from unstructured text is incorrect, got {url_events}" + assert "/donot_detect.js" not in url_events, f"URL extracted from unstructured text is incorrect, got {url_events}" From 672c790b6e4c95e3e63f48e2e0f8c18e7582ff6f Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Fri, 16 Aug 2024 19:15:06 +0100 Subject: [PATCH 052/124] bro, do you even unit test? 
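No functional change in this one, just formatting: black rewraps the long assert-with-message lines into its canonical shape, with the condition parenthesized across lines and the failure message trailing the closing paren. A toy sketch of the shape it settles on, echoing the names in the hunk below (the values are illustrative only, not an addition to the suite):

    url_events = ["https://www.test.notreal/about"]
    # black's preferred wrapping for an assert that carries a message:
    # condition inside parentheses, message after the closing paren
    assert (
        "https://www.test.notreal/about" in url_events
    ), f"URL extracted from unstructured text is incorrect, got {url_events}"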
--- .../test/test_step_2/module_tests/test_module_excavate.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 49e9d75d4..0df7632b1 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -1008,5 +1008,9 @@ def check(self, module_test, events): ), f"Failed to emmit serialized event got {finding_events}" assert finding_events[0].data["path"] == str(file), "File path not included in finding event" url_events = [e.data for e in events if e.type == "URL_UNVERIFIED"] - assert "https://www.test.notreal/about" in url_events, f"URL extracted from unstructured text is incorrect, got {url_events}" - assert "/donot_detect.js" not in url_events, f"URL extracted from unstructured text is incorrect, got {url_events}" + assert ( + "https://www.test.notreal/about" in url_events + ), f"URL extracted from unstructured text is incorrect, got {url_events}" + assert ( + "/donot_detect.js" not in url_events + ), f"URL extracted from unstructured text is incorrect, got {url_events}" From e735daa502b817d80e55b522dd3eb0ac9f32eb89 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 16 Aug 2024 16:58:39 -0400 Subject: [PATCH 053/124] better host inheritance --- bbot/core/engine.py | 2 +- bbot/core/event/base.py | 56 ++++++++++++++-------------- bbot/modules/gowitness.py | 2 +- bbot/test/test_step_1/test_events.py | 37 ++++++++++-------- 4 files changed, 52 insertions(+), 45 deletions(-) diff --git a/bbot/core/engine.py b/bbot/core/engine.py index 06498d259..d5fb2ec13 100644 --- a/bbot/core/engine.py +++ b/bbot/core/engine.py @@ -74,7 +74,7 @@ async def _infinite_retry(self, callback, *args, **kwargs): try: return await asyncio.wait_for(callback(*args, **kwargs), timeout=interval) except (TimeoutError, asyncio.exceptions.TimeoutError): - self.log.debug(f"{self.name}: Timeout after {interval:,} seconds{context}, retrying...") + self.log.debug(f"{self.name}: Timeout after {interval:,} seconds {context}, retrying...") retries += 1 if max_retries is not None and retries > max_retries: raise TimeoutError(f"Timed out after {max_retries*interval:,} seconds {context}") diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 89b5a84a3..9947e616d 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -335,15 +335,6 @@ def host_original(self): return self.host return self._host_original - @property - def closest_host(self): - """ - Walk up the chain of parents events until we hit the first one with a host - """ - if self.host is not None or self.parent is None or self.parent is self: - return self.host - return self.parent.closest_host - @property def port(self): self.host @@ -602,7 +593,7 @@ def get_parents(self, omit=False, include_self=False): return parents def _host(self): - return None + return "" def _sanitize_data(self, data): """ @@ -954,30 +945,39 @@ def _host(self): class ClosestHostEvent(DictHostEvent): - # if a host isn't specified, this event type uses the host from the closest parent + # if a host/path/url isn't specified, this event type grabs it from the closest parent # inherited by FINDING and VULNERABILITY def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - if "host" not in self.data: - closest_host = self.closest_host - if closest_host is None: - raise ValueError("No host was found in event parents. 
Host must be specified!") - self.data["host"] = str(closest_host) + if not self.host: + for parent in self.get_parents(include_self=True): + # inherit closest URL + if not "url" in self.data: + parent_url = getattr(parent, "parsed_url", None) + if parent_url is not None: + self.data["url"] = parent_url.geturl() + # inherit closest path + if not "path" in self.data and isinstance(parent.data, dict): + parent_path = parent.data.get("path", None) + if parent_path is not None: + self.data["path"] = parent_path + # inherit closest host + if parent.host: + self.data["host"] = str(parent.host) + break + # die if we still haven't found a host + if not self.host: + raise ValueError("No host was found in event parents. Host must be specified!") class DictPathEvent(DictEvent): - _path_keywords = ["path", "filename"] - def sanitize_data(self, data): new_data = dict(data) file_blobs = getattr(self.scan, "_file_blobs", False) folder_blobs = getattr(self.scan, "_folder_blobs", False) - for path_keyword in self._path_keywords: - blob = None - try: - data_path = Path(data[path_keyword]) - except KeyError: - continue + blob = None + try: + data_path = Path(data["path"]) if data_path.is_file(): self.add_tag("file") if file_blobs: @@ -987,10 +987,10 @@ def sanitize_data(self, data): self.add_tag("folder") if folder_blobs: blob = self._tar_directory(data_path) - else: - continue - if blob: - new_data["blob"] = base64.b64encode(blob).decode("utf-8") + except KeyError: + pass + if blob: + new_data["blob"] = base64.b64encode(blob).decode("utf-8") return new_data diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 93950e340..5bfdfc42a 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -140,7 +140,7 @@ async def handle_batch(self, *events): url = screenshot["url"] final_url = screenshot["final_url"] filename = self.screenshot_path / screenshot["filename"] - webscreenshot_data = {"filename": str(filename), "url": final_url} + webscreenshot_data = {"path": str(filename), "url": final_url} parent_event = event_dict[url] await self.emit_event( webscreenshot_data, diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index e9d1edeaf..913035d66 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -813,25 +813,32 @@ def test_event_closest_host(): # first event has a host event1 = scan.make_event("evilcorp.com", "DNS_NAME", parent=scan.root_event) assert event1.host == "evilcorp.com" - assert event1.closest_host == "evilcorp.com" - # second event has no host - event2 = scan.make_event("wat", "ASDF", parent=event1) - assert event2.host == None - assert event2.closest_host == "evilcorp.com" - # finding automatically uses the host from the first event - finding = scan.make_event({"path": "/tmp/asdf.txt", "description": "test"}, "FINDING", parent=event2) - assert finding.data["host"] == "evilcorp.com" - assert finding.host == "evilcorp.com" - # same with vuln - vuln = scan.make_event( - {"path": "/tmp/asdf.txt", "description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event2 + # second event has a host + url + event2 = scan.make_event( + {"method": "GET", "url": "http://www.evilcorp.com/asdf", "hash": {"header_mmh3": "1", "body_mmh3": "2"}}, + "HTTP_RESPONSE", + parent=event1, ) - assert vuln.data["host"] == "evilcorp.com" - assert vuln.host == "evilcorp.com" + assert event2.host == "www.evilcorp.com" + # third event has a path + event3 = scan.make_event({"path": "/tmp/asdf.txt"}, "FILESYSTEM", 
parent=event2) + assert not event3.host + # finding automatically uses the host from the second event + finding = scan.make_event({"description": "test"}, "FINDING", parent=event3) + assert finding.data["host"] == "www.evilcorp.com" + assert finding.data["url"] == "http://www.evilcorp.com/asdf" + assert finding.data["path"] == "/tmp/asdf.txt" + assert finding.host == "www.evilcorp.com" + # same with vuln + vuln = scan.make_event({"description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event3) + assert vuln.data["host"] == "www.evilcorp.com" + assert vuln.data["url"] == "http://www.evilcorp.com/asdf" + assert vuln.data["path"] == "/tmp/asdf.txt" + assert vuln.host == "www.evilcorp.com" # no host == not allowed event3 = scan.make_event("wat", "ASDF", parent=scan.root_event) - assert event3.host == None + assert not event3.host with pytest.raises(ValueError): finding = scan.make_event({"path": "/tmp/asdf.txt", "description": "test"}, "FINDING", parent=event3) with pytest.raises(ValueError): From 3b1b7f85c3a1d67dae022bee20f7fa91ca50a5c3 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 16 Aug 2024 17:27:41 -0400 Subject: [PATCH 054/124] fix httpx memory leak --- bbot/core/helpers/web/engine.py | 14 +++++++++++--- bbot/modules/bucket_azure.py | 2 +- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index 8f7984e2e..a4e0abdff 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -34,12 +34,20 @@ def __init__(self, socket_path, target, config={}, debug=False): self.web_config = self.config.get("web", {}) self.http_debug = self.web_config.get("debug", False) self._ssl_context_noverify = None - self.web_client = self.AsyncClient(persist_cookies=False) + self.web_clients = {} + self.web_clients[0] = self.AsyncClient(persist_cookies=False, retries=0) + self.web_client = self.web_clients[0] def AsyncClient(self, *args, **kwargs): - from .client import BBOTAsyncClient + # cache by retries to prevent unwanted accumulation of clients + # (they are not garbage-collected) + retries = kwargs.get("retries", 0) + try: + return self.web_clients[retries] + except KeyError: + from .client import BBOTAsyncClient - return BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs) + return BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs) async def request(self, *args, **kwargs): raise_error = kwargs.pop("raise_error", False) diff --git a/bbot/modules/bucket_azure.py b/bbot/modules/bucket_azure.py index 032e409b4..dcf90eb34 100644 --- a/bbot/modules/bucket_azure.py +++ b/bbot/modules/bucket_azure.py @@ -24,7 +24,7 @@ class bucket_azure(bucket_template): def build_bucket_request(self, bucket_name, base_domain, region): url = self.build_url(bucket_name, base_domain, region) url = url.strip("/") + f"/{bucket_name}?restype=container" - return url, {"retries": 0} + return url, {} def check_bucket_exists(self, bucket_name, response): status_code = getattr(response, "status_code", 0) From aa1e93c57692057d784bcc96990881678a20f08b Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 16 Aug 2024 17:28:52 -0400 Subject: [PATCH 055/124] better caching --- bbot/core/helpers/web/engine.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index a4e0abdff..448d95d58 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -47,7 +47,9 @@ def 
AsyncClient(self, *args, **kwargs): except KeyError: from .client import BBOTAsyncClient - return BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs) + client = BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs) + self.web_clients[retries] = client + return client async def request(self, *args, **kwargs): raise_error = kwargs.pop("raise_error", False) From 01d9b8f17bf7edf3fc42d23643d28ff784b2dab1 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 16 Aug 2024 23:00:46 -0400 Subject: [PATCH 056/124] update unstructured to use pypi --- bbot/modules/unstructured.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 31d0a96ee..88ab601c8 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -67,9 +67,7 @@ class unstructured(BaseModule): } deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"] - deps_pip = [ - "git+https://github.com/Unstructured-IO/unstructured@d0211cc41faa3988b0cfdefa3e0a8f80adbf013b#egg=unstructured[all-docs]" - ] + deps_pip = ["unstructured[all-docs]~=0.5.15"] async def setup(self): self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) From 147579d0c891e06682cfbdf520307485b4710f6d Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Sat, 17 Aug 2024 09:37:13 +0100 Subject: [PATCH 057/124] Remove getting parent.path in excavate as the `FINDING` does that for us --- bbot/modules/internal/excavate.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 6e45d5055..c87683bad 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -184,8 +184,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if isinstance(event.parent.data, dict) and event.parent.data.get("path"): - event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description}" if yara_rule_settings.emit_match: event_data["description"] += f" [{result}]" @@ -275,8 +273,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if isinstance(event.parent.data, dict) and event.parent.data.get("path"): - event_data["path"] = event.parent.data["path"] description_string = ( f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else "" ) @@ -593,8 +589,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] - if isinstance(event.parent.data, dict) and event.parent.data.get("path"): - event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") @@ -627,8 +621,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte event_data = {"host": str(event.host)} if isinstance(event.data, dict) and 
event.data.get("url"): event_data["url"] = event.data["url"] - if isinstance(event.parent.data, dict) and event.parent.data.get("path"): - event_data["path"] = event.parent.data["path"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") From 8048e70e86e031415417a4a3a4bd43e4965aa516 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Sat, 17 Aug 2024 10:20:35 +0100 Subject: [PATCH 058/124] if there is an event host from `HTTP_RESPONSE` then set this in the event_data otherwise allow parent inheritence --- bbot/modules/internal/excavate.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index c87683bad..261fd85df 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -181,7 +181,9 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte """ for identifier, results in yara_results.items(): for result in results: - event_data = {"host": str(event.host)} + event_data = {} + if event.host: + event_data["host"] = str(event.host) if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description}" @@ -270,7 +272,9 @@ def __init__(self, excavate): async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier, results in yara_results.items(): for result in results: - event_data = {"host": str(event.host)} + event_data = {} + if event.host: + event_data["host"] = str(event.host) if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] description_string = ( @@ -586,7 +590,9 @@ def __init__(self, excavate): async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier in yara_results.keys(): for findings in yara_results[identifier]: - event_data = {"host": str(event.host)} + event_data = {} + if event.host: + event_data["host"] = str(event.host) if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" @@ -618,7 +624,9 @@ def __init__(self, excavate): async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier in yara_results.keys(): for findings in yara_results[identifier]: - event_data = {"host": str(event.host)} + event_data = {} + if event.host: + event_data["host"] = str(event.host) if isinstance(event.data, dict) and event.data.get("url"): event_data["url"] = event.data["url"] event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" From a0a8f32c3738724fbf7aa9a5547675076258f54d Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 17 Aug 2024 08:19:54 -0400 Subject: [PATCH 059/124] clean up --- bbot/modules/internal/excavate.py | 31 +++++-------------- .../module_tests/test_module_excavate.py | 16 ++++++++-- 2 files changed, 21 insertions(+), 26 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 261fd85df..bf8b22516 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -181,12 +181,7 @@ async def process(self, yara_results, event, yara_rule_settings, 
discovery_conte """ for identifier, results in yara_results.items(): for result in results: - event_data = {} - if event.host: - event_data["host"] = str(event.host) - if isinstance(event.data, dict) and event.data.get("url"): - event_data["url"] = event.data["url"] - event_data["description"] = f"{discovery_context} {yara_rule_settings.description}" + event_data = {"description": f"{discovery_context} {yara_rule_settings.description}"} if yara_rule_settings.emit_match: event_data["description"] += f" [{result}]" await self.report(event_data, event, yara_rule_settings, discovery_context) @@ -273,10 +268,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte for identifier, results in yara_results.items(): for result in results: event_data = {} - if event.host: - event_data["host"] = str(event.host) - if isinstance(event.data, dict) and event.data.get("url"): - event_data["url"] = event.data["url"] description_string = ( f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else "" ) @@ -590,13 +581,9 @@ def __init__(self, excavate): async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier in yara_results.keys(): for findings in yara_results[identifier]: - event_data = {} - if event.host: - event_data["host"] = str(event.host) - if isinstance(event.data, dict) and event.data.get("url"): - event_data["url"] = event.data["url"] - event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" - + event_data = { + "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})" + } await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") class SerializationExtractor(ExcavateRule): @@ -624,13 +611,9 @@ def __init__(self, excavate): async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier in yara_results.keys(): for findings in yara_results[identifier]: - event_data = {} - if event.host: - event_data["host"] = str(event.host) - if isinstance(event.data, dict) and event.data.get("url"): - event_data["url"] = event.data["url"] - event_data["description"] = f"{discovery_context} {yara_rule_settings.description} ({identifier})" - + event_data = { + "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})" + } await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") class FunctionalityExtractor(ExcavateRule): diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 0df7632b1..576e1de33 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -1001,10 +1001,22 @@ def check(self, module_test, events): finding_events = [e for e in events if e.type == "FINDING"] assert 2 == len(finding_events), "Failed to emmit FINDING events" assert any( - e.type == "FINDING" and "JWT" in e.data["description"] for e in finding_events + e.type == "FINDING" + and "JWT" in e.data["description"] + and e.data["url"] == "http://127.0.0.1:8888/Test_PDF" + and e.data["host"] == "127.0.0.1" + and e.data["path"].endswith("http-127-0-0-1-8888-test-pdf.pdf") + and str(e.host) == "127.0.0.1" + for e in finding_events ), f"Failed to emmit JWT event got {finding_events}" assert any( - e.type == "FINDING" and "DOTNET" in e.data["description"] for e in 
finding_events + e.type == "FINDING" + and "DOTNET" in e.data["description"] + and e.data["url"] == "http://127.0.0.1:8888/Test_PDF" + and e.data["host"] == "127.0.0.1" + and e.data["path"].endswith("http-127-0-0-1-8888-test-pdf.pdf") + and str(e.host) == "127.0.0.1" + for e in finding_events ), f"Failed to emmit serialized event got {finding_events}" assert finding_events[0].data["path"] == str(file), "File path not included in finding event" url_events = [e.data for e in events if e.type == "URL_UNVERIFIED"] From 62846a06f50829d936001b7bdf360d91d8135a04 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 17 Aug 2024 08:30:22 -0400 Subject: [PATCH 060/124] fix version --- bbot/modules/unstructured.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 88ab601c8..c58efa37e 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -67,7 +67,7 @@ class unstructured(BaseModule): } deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"] - deps_pip = ["unstructured[all-docs]~=0.5.15"] + deps_pip = ["unstructured[all-docs]>=0.5.15,<1.0"] async def setup(self): self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) From 2688baeb2b1cf6fe9b1e28215ab817d8459dcd9a Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 17 Aug 2024 08:30:50 -0400 Subject: [PATCH 061/124] update readme --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 51e7a5300..ad2ad61d0 100644 --- a/README.md +++ b/README.md @@ -29,6 +29,9 @@ Passive API sources plus a recursive DNS brute-force with target-specific subdom ```bash # find subdomains of evilcorp.com bbot -t evilcorp.com -p subdomain-enum + +# passive sources only +bbot -t evilcorp.com -p subdomain-enum -rf passive ``` From 7520e93aea2886ba9902ee37c7356ab7a35b0d55 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 04:41:55 +0000 Subject: [PATCH 062/124] Bump mkdocstrings-python from 1.10.3 to 1.10.8 Bumps [mkdocstrings-python](https://github.com/mkdocstrings/python) from 1.10.3 to 1.10.8. - [Release notes](https://github.com/mkdocstrings/python/releases) - [Changelog](https://github.com/mkdocstrings/python/blob/main/CHANGELOG.md) - [Commits](https://github.com/mkdocstrings/python/compare/1.10.3...1.10.8) --- updated-dependencies: - dependency-name: mkdocstrings-python dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 802c73d9f..99d41218a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -673,13 +673,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "griffe" -version = "0.45.3" +version = "1.1.0" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
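An aside on the version pin fixed in PATCH 060 above: a compatible-release pin like "unstructured[all-docs]~=0.5.15" is shorthand for ">=0.5.15,<0.6" under PEP 440, so it silently blocks 0.6.x and later; the replacement range ">=0.5.15,<1.0" is what actually allows newer minor releases. A minimal sketch of the difference, using the PyPA "packaging" library (illustrative only, not part of any patch):

    from packaging.specifiers import SpecifierSet

    # "~=0.5.15" expands to ">=0.5.15,<0.6" under PEP 440
    assert not SpecifierSet("~=0.5.15").contains("0.6.0")
    # the replacement range accepts everything below 1.0
    assert SpecifierSet(">=0.5.15,<1.0").contains("0.6.0")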
optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.45.3-py3-none-any.whl", hash = "sha256:ed1481a680ae3e28f91a06e0d8a51a5c9b97555aa2527abc2664447cc22337d6"}, - {file = "griffe-0.45.3.tar.gz", hash = "sha256:02ee71cc1a5035864b97bd0dbfff65c33f6f2c8854d3bd48a791905c2b8a44b9"}, + {file = "griffe-1.1.0-py3-none-any.whl", hash = "sha256:38ccc5721571c95ae427123074cf0dc0d36bce7c9701ab2ada9fe0566ff50c10"}, + {file = "griffe-1.1.0.tar.gz", hash = "sha256:c6328cbdec0d449549c1cc332f59227cd5603f903479d73e4425d828b782ffc3"}, ] [package.dependencies] @@ -1307,17 +1307,17 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] [[package]] name = "mkdocstrings-python" -version = "1.10.3" +version = "1.10.8" description = "A Python handler for mkdocstrings." optional = false python-versions = ">=3.8" files = [ - {file = "mkdocstrings_python-1.10.3-py3-none-any.whl", hash = "sha256:11ff6d21d3818fb03af82c3ea6225b1534837e17f790aa5f09626524171f949b"}, - {file = "mkdocstrings_python-1.10.3.tar.gz", hash = "sha256:321cf9c732907ab2b1fedaafa28765eaa089d89320f35f7206d00ea266889d03"}, + {file = "mkdocstrings_python-1.10.8-py3-none-any.whl", hash = "sha256:bb12e76c8b071686617f824029cb1dfe0e9afe89f27fb3ad9a27f95f054dcd89"}, + {file = "mkdocstrings_python-1.10.8.tar.gz", hash = "sha256:5856a59cbebbb8deb133224a540de1ff60bded25e54d8beacc375bb133d39016"}, ] [package.dependencies] -griffe = ">=0.44" +griffe = ">=0.49" mkdocstrings = ">=0.25" [[package]] From a9a849b645f0a8eef7535cfba6895f746d0c7f04 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 04:42:18 +0000 Subject: [PATCH 063/124] Bump cachetools from 5.3.3 to 5.5.0 Bumps [cachetools](https://github.com/tkem/cachetools) from 5.3.3 to 5.5.0. - [Changelog](https://github.com/tkem/cachetools/blob/master/CHANGELOG.rst) - [Commits](https://github.com/tkem/cachetools/compare/v5.3.3...v5.5.0) --- updated-dependencies: - dependency-name: cachetools dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 802c73d9f..b67188105 100644 --- a/poetry.lock +++ b/poetry.lock @@ -177,13 +177,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] From 940a7274f45d8b430a31188b944bd5ceabf27090 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 04:42:55 +0000 Subject: [PATCH 064/124] Bump mike from 2.1.2 to 2.1.3 Bumps [mike](https://github.com/jimporter/mike) from 2.1.2 to 2.1.3. 
- [Release notes](https://github.com/jimporter/mike/releases)
- [Changelog](https://github.com/jimporter/mike/blob/master/CHANGES.md)
- [Commits](https://github.com/jimporter/mike/compare/v2.1.2...v2.1.3)

---
updated-dependencies:
- dependency-name: mike
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
---
 poetry.lock | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 802c73d9f..908d9448c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1134,13 +1134,13 @@ files = [

 [[package]]
 name = "mike"
-version = "2.1.2"
+version = "2.1.3"
 description = "Manage multiple versions of your MkDocs-powered documentation"
 optional = false
 python-versions = "*"
 files = [
-    {file = "mike-2.1.2-py3-none-any.whl", hash = "sha256:d61d9b423ab412d634ca2bd520136d5114e3cc73f4bbd1aa6a0c6625c04918c0"},
-    {file = "mike-2.1.2.tar.gz", hash = "sha256:d59cc8054c50f9c8a046cfd47f9b700cf9ff1b2b19f420bd8812ca6f94fa8bd3"},
+    {file = "mike-2.1.3-py3-none-any.whl", hash = "sha256:d90c64077e84f06272437b464735130d380703a76a5738b152932884c60c062a"},
+    {file = "mike-2.1.3.tar.gz", hash = "sha256:abd79b8ea483fb0275b7972825d3082e5ae67a41820f8d8a0dc7a3f49944e810"},
 ]

 [package.dependencies]

From 373058c33dfdbafc59a8480b37bf65599d9d88b5 Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Tue, 20 Aug 2024 16:14:49 +0100
Subject: [PATCH 065/124] Enhance github workflows to download any workflow
 artifacts as well

---
 bbot/modules/github_workflows.py              | 74 ++++++++++++++++++-
 .../test_module_github_workflows.py           | 42 ++++++++++-
 2 files changed, 112 insertions(+), 4 deletions(-)

diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py
index d51da905e..df46f155c 100644
--- a/bbot/modules/github_workflows.py
+++ b/bbot/modules/github_workflows.py
@@ -9,7 +9,7 @@ class github_workflows(github):
     produced_events = ["FILESYSTEM"]
     flags = ["passive", "safe", "code-enum"]
     meta = {
-        "description": "Download a github repositories workflow logs",
+        "description": "Download a github repository's workflow logs and workflow artifacts",
         "created_date": "2024-04-29",
         "author": "@domwhewell-sage",
     }
@@ -46,9 +46,9 @@ async def handle_event(self, event):
                 self.log.debug(f"Looking up runs for {workflow_name} in {owner}/{repo}")
                 for run in await self.get_workflow_runs(owner, repo, workflow_id):
                     run_id = run.get("id")
+                    workflow_url = f"https://github.com/{owner}/{repo}/actions/runs/{run_id}"
                     self.log.debug(f"Downloading logs for {workflow_name}/{run_id} in {owner}/{repo}")
                     for log in await self.download_run_logs(owner, repo, run_id):
-                        workflow_url = f"https://github.com/{owner}/{repo}/actions/runs/{run_id}"
                         logfile_event = self.make_event(
                             {
                                 "path": str(log),
@@ -62,6 +62,28 @@
                             logfile_event,
                             context=f"{{module}} downloaded workflow run logs from {workflow_url} to {{event.type}}: {log}",
                         )
+                    artifacts = await self.get_run_artifacts(owner, repo, run_id)
+                    if artifacts:
+                        for artifact in artifacts:
+                            artifact_id = artifact.get("id")
+                            artifact_name = artifact.get("name")
+                            expired = artifact.get("expired")
+                            if not expired:
+                                filepath = await self.download_run_artifacts(owner, repo, artifact_id, artifact_name)
+                                if filepath:
+                                    artifact_event = self.make_event(
+                                        {
+                                            "path": str(filepath),
+                                            "description": f"Workflow run artifact from {workflow_url}",
+                                        },
+                                        "FILESYSTEM",
+                                        tags=["zipfile"],
+                                        parent=event,
+                                    )
+                                    await self.emit_event(
+                                        artifact_event,
+                                        context=f"{{module}} downloaded workflow 
run artifact from {workflow_url} to {{event.type}}: {filepath}", + ) async def get_workflows(self, owner, repo): workflows = [] @@ -150,3 +172,51 @@ async def download_run_logs(self, owner, repo, run_id): return main_logs else: return [] + + async def get_run_artifacts(self, owner, repo, run_id): + artifacts = [] + url = f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + r = await self.helpers.request(url, headers=self.headers) + if r is None: + return artifacts + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + return artifacts + if status_code != 200: + return artifacts + try: + j = r.json().get("artifacts", []) + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + return artifacts + if not j: + return artifacts + for item in j: + artifacts.append(item) + return artifacts + + async def download_run_artifacts(self, owner, repo, artifact_id, artifact_name): + folder = self.output_dir / owner / repo + self.helpers.mkdir(folder) + file_destination = folder / artifact_name + try: + await self.helpers.download( + f"{self.base_url}/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/zip", + filename=file_destination, + headers=self.headers, + raise_error=True, + warn=False, + ) + self.info( + f"Downloaded workflow artifact {owner}/{repo}/{artifact_id}/{artifact_name} to {file_destination}" + ) + except Exception as e: + file_destination = None + response = getattr(e, "response", None) + status_code = getattr(response, "status_code", 0) + if status_code == 403: + self.warning( + f"The current access key does not have access to workflow artifacts {owner}/{repo}/{artifact_id} (status: {status_code})" + ) + return file_destination diff --git a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py index 7d7340947..f3c4a2cf5 100644 --- a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py +++ b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py @@ -437,9 +437,47 @@ async def setup_before_prep(self, module_test): url="https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02", content=self.zip_content, ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/artifacts", + json={ + "total_count": 1, + "artifacts": [ + { + "id": 1829832535, + "node_id": "MDg6QXJ0aWZhY3QxODI5ODMyNTM1", + "name": "build.tar.gz", + "size_in_bytes": 245770648, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535", + "archive_download_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535/zip", + "expired": False, + "created_at": "2024-08-19T22:32:17Z", + "updated_at": "2024-08-19T22:32:18Z", + "expires_at": "2024-09-02T22:21:59Z", + "workflow_run": { + "id": 10461468466, + "repository_id": 89290483, + "head_repository_id": 799444840, + "head_branch": "not-a-real-branch", + "head_sha": "1eeb5354ab7b1e4141b8a6473846e2a5ea0dd2c6", + }, + } + ], + }, + ) + 
module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535/zip", + headers={ + "location": "https://pipelinesghubeus22.actions.githubusercontent.com/uYHz4cw2WwYcB2EU57uoCs3MaEDiz8veiVlAtReP3xevBriD1h/_apis/pipelines/1/runs/214601/signedartifactscontent?artifactName=build.tar.gz&urlExpires=2024-08-20T14%3A41%3A41.8000556Z&urlSigningMethod=HMACV2&urlSignature=OOBxLx4eE5A8uHjxOIvQtn3cLFQOBW927mg0hcTHO6U%3D" + }, + status_code=302, + ) + module_test.httpx_mock.add_response( + url="https://pipelinesghubeus22.actions.githubusercontent.com/uYHz4cw2WwYcB2EU57uoCs3MaEDiz8veiVlAtReP3xevBriD1h/_apis/pipelines/1/runs/214601/signedartifactscontent?artifactName=build.tar.gz&urlExpires=2024-08-20T14%3A41%3A41.8000556Z&urlSigningMethod=HMACV2&urlSignature=OOBxLx4eE5A8uHjxOIvQtn3cLFQOBW927mg0hcTHO6U%3D", + content=self.zip_content, + ) def check(self, module_test, events): - assert len(events) == 7 + assert len(events) == 8 assert 1 == len( [ e @@ -473,7 +511,7 @@ def check(self, module_test, events): ] ), "Failed to find blacklanternsecurity github repo" filesystem_events = [e for e in events if e.type == "FILESYSTEM"] - assert 2 == len(filesystem_events), filesystem_events + assert 3 == len(filesystem_events), filesystem_events for filesystem_event in filesystem_events: file = Path(filesystem_event.data["path"]) assert file.is_file(), "Destination file does not exist" From 7bfbb29e0575ac21bcd8789f211a1b72fe94a9ee Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Tue, 20 Aug 2024 18:58:47 +0100 Subject: [PATCH 066/124] Added RawV2 to trufflehog output --- bbot/modules/trufflehog.py | 25 ++++++++++++++----- .../module_tests/test_module_trufflehog.py | 4 +-- 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index aacc23a77..002cdac9a 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -90,17 +90,25 @@ async def handle_event(self, event): host = event.host else: host = str(event.parent.host) - async for decoder_name, detector_name, raw_result, verified, source_metadata in self.execute_trufflehog( - module, path - ): + async for ( + decoder_name, + detector_name, + raw_result, + rawv2_result, + verified, + source_metadata, + ) in self.execute_trufflehog(module, path): if verified: data = { "severity": "High", - "description": f"Verified Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]", + "description": f"Verified Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Details: [{source_metadata}]", "host": host, } if description: data["description"] += f" Description: [{description}]" + data["description"] += f" Raw result: [{raw_result}]" + if rawv2_result: + data["description"] += f" RawV2 result: [{rawv2_result}]" await self.emit_event( data, "VULNERABILITY", @@ -109,11 +117,14 @@ async def handle_event(self, event): ) else: data = { - "description": f"Potential Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]", + "description": f"Potential Secret Found. 
Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Details: [{source_metadata}]", "host": host, } if description: data["description"] += f" Description: [{description}]" + data["description"] += f" Raw result: [{raw_result}]" + if rawv2_result: + data["description"] += f" RawV2 result: [{rawv2_result}]" await self.emit_event( data, "FINDING", @@ -162,11 +173,13 @@ async def execute_trufflehog(self, module, path): raw_result = j.get("Raw", "") + rawv2_result = j.get("RawV2", "") + verified = j.get("Verified", False) source_metadata = j.get("SourceMetadata", {}) - yield (decoder_name, detector_name, raw_result, verified, source_metadata) + yield (decoder_name, detector_name, raw_result, rawv2_result, verified, source_metadata) finally: stats_file.unlink() diff --git a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py index 9cd5be601..8873d9255 100644 --- a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py +++ b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py @@ -851,7 +851,7 @@ def check(self, module_test, events): if e.type == "VULNERABILITY" and (e.data["host"] == "hub.docker.com" or e.data["host"] == "github.com") and "Verified Secret Found." in e.data["description"] - and "Secret: [https://admin:admin@the-internet.herokuapp.com]" in e.data["description"] + and "Raw result: [https://admin:admin@the-internet.herokuapp.com]" in e.data["description"] ] assert 3 == len(vuln_events), "Failed to find secret in events" github_repo_event = [e for e in vuln_events if "test_keys" in e.data["description"]][0].parent @@ -898,7 +898,7 @@ def check(self, module_test, events): if e.type == e.type == "FINDING" and (e.data["host"] == "hub.docker.com" or e.data["host"] == "github.com") and "Potential Secret Found." in e.data["description"] - and "Secret: [https://admin:admin@internal.host.com]" in e.data["description"] + and "Raw result: [https://admin:admin@internal.host.com]" in e.data["description"] ] assert 3 == len(finding_events), "Failed to find secret in events" github_repo_event = [e for e in finding_events if "test_keys" in e.data["description"]][0].parent From 06fe2a9fcb8baa341acca827bf9b56c4b58b41e1 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Tue, 20 Aug 2024 21:19:22 +0100 Subject: [PATCH 067/124] Add a test for the RawV2 result --- bbot/test/test_step_2/module_tests/test_module_trufflehog.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py index 8873d9255..6f845c251 100644 --- a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py +++ b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py @@ -852,6 +852,7 @@ def check(self, module_test, events): and (e.data["host"] == "hub.docker.com" or e.data["host"] == "github.com") and "Verified Secret Found." 
in e.data["description"] and "Raw result: [https://admin:admin@the-internet.herokuapp.com]" in e.data["description"] + and "RawV2 result: [https://admin:admin@the-internet.herokuapp.com/basic_auth]" ] assert 3 == len(vuln_events), "Failed to find secret in events" github_repo_event = [e for e in vuln_events if "test_keys" in e.data["description"]][0].parent From c1415cc72e615c17af89ee2d58e5a4d382548c35 Mon Sep 17 00:00:00 2001 From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com> Date: Tue, 20 Aug 2024 16:47:08 -0400 Subject: [PATCH 068/124] Update test_module_trufflehog.py --- bbot/test/test_step_2/module_tests/test_module_trufflehog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py index 6f845c251..da72ff354 100644 --- a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py +++ b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py @@ -852,7 +852,7 @@ def check(self, module_test, events): and (e.data["host"] == "hub.docker.com" or e.data["host"] == "github.com") and "Verified Secret Found." in e.data["description"] and "Raw result: [https://admin:admin@the-internet.herokuapp.com]" in e.data["description"] - and "RawV2 result: [https://admin:admin@the-internet.herokuapp.com/basic_auth]" + and "RawV2 result: [https://admin:admin@the-internet.herokuapp.com/basic_auth]" in e.data["description"] ] assert 3 == len(vuln_events), "Failed to find secret in events" github_repo_event = [e for e in vuln_events if "test_keys" in e.data["description"]][0].parent From 73739ca5ba3cb5afcc41cef40526e889188f4d33 Mon Sep 17 00:00:00 2001 From: Dominic Whewell <122788350+domwhewell-sage@users.noreply.github.com> Date: Wed, 21 Aug 2024 15:47:53 +0100 Subject: [PATCH 069/124] Split out the owner from the repository URL and use that as the containing folder --- bbot/modules/git_clone.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bbot/modules/git_clone.py b/bbot/modules/git_clone.py index dbf24e91e..9f0441304 100644 --- a/bbot/modules/git_clone.py +++ b/bbot/modules/git_clone.py @@ -46,11 +46,14 @@ async def handle_event(self, event): ) async def clone_git_repository(self, repository_url): + owner = repository_url.split("/")[-2] + folder = self.output_dir / owner + self.helpers.mkdir(folder) if self.api_key: url = repository_url.replace("https://github.com", f"https://user:{self.api_key}@github.com") else: url = repository_url - command = ["git", "-C", self.output_dir, "clone", url] + command = ["git", "-C", folder, "clone", url] try: output = await self.run_process(command, env={"GIT_TERMINAL_PROMPT": "0"}, check=True) except CalledProcessError as e: @@ -58,4 +61,4 @@ async def clone_git_repository(self, repository_url): return folder_name = output.stderr.split("Cloning into '")[1].split("'")[0] - return self.output_dir / folder_name + return self.output_dir / folder / folder_name From ff53e242522feaf2c1cb773be20bccc7ae27322f Mon Sep 17 00:00:00 2001 From: Dominic Whewell <122788350+domwhewell-sage@users.noreply.github.com> Date: Wed, 21 Aug 2024 16:34:53 +0100 Subject: [PATCH 070/124] Forgot to update the test --- bbot/test/test_step_2/module_tests/test_module_git_clone.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_git_clone.py b/bbot/test/test_step_2/module_tests/test_module_git_clone.py index cd59f5dc2..15bc54fb3 100644 --- 
a/bbot/test/test_step_2/module_tests/test_module_git_clone.py +++ b/bbot/test/test_step_2/module_tests/test_module_git_clone.py @@ -196,7 +196,7 @@ def check(self, module_test, events): e for e in events if e.type == "FILESYSTEM" - and "git_repos/test_keys" in e.data["path"] + and "git_repos/.bbot_test/test_keys" in e.data["path"] and "git" in e.tags and e.scope_distance == 1 ] From d5a881b917c4b7c3e3c6afcbd6bd8d9172de9482 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 21 Aug 2024 17:14:54 +0100 Subject: [PATCH 071/124] More test corrections --- bbot/test/test_step_2/module_tests/test_module_trufflehog.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py index da72ff354..7dde0d38a 100644 --- a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py +++ b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py @@ -868,7 +868,7 @@ def check(self, module_test, events): [ e for e in filesystem_events - if e.data["path"].endswith("/git_repos/test_keys") and Path(e.data["path"]).is_dir() + if e.data["path"].endswith("/git_repos/.bbot_test/test_keys") and Path(e.data["path"]).is_dir() ] ), "Test keys repo dir does not exist" assert 1 == len( @@ -915,7 +915,7 @@ def check(self, module_test, events): [ e for e in filesystem_events - if e.data["path"].endswith("/git_repos/test_keys") and Path(e.data["path"]).is_dir() + if e.data["path"].endswith("/git_repos/.bbot_test/test_keys") and Path(e.data["path"]).is_dir() ] ), "Test keys repo dir does not exist" assert 1 == len( From ad6de812687f0761144a4ce8391a03072ae0ceb3 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Wed, 21 Aug 2024 18:46:41 +0100 Subject: [PATCH 072/124] removed unnecessary `self.output_dir` --- bbot/modules/git_clone.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/git_clone.py b/bbot/modules/git_clone.py index 9f0441304..4b64ee480 100644 --- a/bbot/modules/git_clone.py +++ b/bbot/modules/git_clone.py @@ -61,4 +61,4 @@ async def clone_git_repository(self, repository_url): return folder_name = output.stderr.split("Cloning into '")[1].split("'")[0] - return self.output_dir / folder / folder_name + return folder / folder_name From 40b034b5b6e3c8f46ac3737c71594fb7f166a5b7 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Fri, 23 Aug 2024 20:06:38 -0400 Subject: [PATCH 073/124] fixing web_parameters appearing when no modules listening --- bbot/modules/internal/excavate.py | 58 ++++++++++++++++--------------- 1 file changed, 30 insertions(+), 28 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index bf8b22516..e542aa20a 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -857,33 +857,35 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon decoded_data = await self.helpers.re.recursive_decode(data) - content_type_lower = content_type.lower() if content_type else "" - extraction_map = { - "json": self.helpers.extract_params_json, - "xml": self.helpers.extract_params_xml, - } - - for source_type, extract_func in extraction_map.items(): - if source_type in content_type_lower: - results = extract_func(data) - if results: - for parameter_name, original_value in results: - description = ( - f"HTTP Extracted Parameter (speculative from {source_type} content) [{parameter_name}]" - ) - data = { - "host": str(event.host), - "type": "SPECULATIVE", - 
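+                            # "name"/"original_value" come from the helpers.extract_params_json / extract_params_xml results above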
"name": parameter_name, - "original_value": original_value, - "url": str(event.data["url"]), - "additional_params": {}, - "assigned_cookies": self.assigned_cookies, - "description": description, - } - context = f"excavate's Parameter extractor found a speculative WEB_PARAMETER: {parameter_name} by parsing {source_type} data from {str(event.host)}" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) - return + if self.parameter_extraction: + + content_type_lower = content_type.lower() if content_type else "" + extraction_map = { + "json": self.helpers.extract_params_json, + "xml": self.helpers.extract_params_xml, + } + + for source_type, extract_func in extraction_map.items(): + if source_type in content_type_lower: + results = extract_func(data) + if results: + for parameter_name, original_value in results: + description = ( + f"HTTP Extracted Parameter (speculative from {source_type} content) [{parameter_name}]" + ) + data = { + "host": str(event.host), + "type": "SPECULATIVE", + "name": parameter_name, + "original_value": original_value, + "url": str(event.data["url"]), + "additional_params": {}, + "assigned_cookies": self.assigned_cookies, + "description": description, + } + context = f"excavate's Parameter extractor found a speculative WEB_PARAMETER: {parameter_name} by parsing {source_type} data from {str(event.host)}" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + return for result in self.yara_rules.match(data=f"{data}\n{decoded_data}"): rule_name = result.rule @@ -938,7 +940,7 @@ async def handle_event(self, event): for header, header_values in headers.items(): for header_value in header_values: - if header.lower() == "set-cookie": + if header.lower() == "set-cookie" and self.parameter_extraction: if "=" not in header_value: self.debug(f"Cookie found without '=': {header_value}") continue From 696ab918f1c7a75422847e11b6ddc014f94792c2 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Fri, 23 Aug 2024 20:38:10 -0400 Subject: [PATCH 074/124] optimize generate_templist --- bbot/modules/deadly/ffuf.py | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index e0e88fbe8..e5ac0a0db 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -312,18 +312,29 @@ async def execute_ffuf( def generate_templist(self, prefix=None): line_count = 0 - virtual_file = [] + + if prefix: + prefix = prefix.strip().lower() + + max_lines = self.config.get("lines") + banned_set = set(self.banned_characters) + for idx, val in enumerate(self.wordlist_lines): - if idx > self.config.get("lines"): + if idx > max_lines: break - if len(val) > 0: - if val.strip().lower() in self.blacklist: - self.debug(f"Skipping adding [{val.strip()}] to wordlist because it was in the blacklist") + stripped_val = val.strip().lower() + if stripped_val: + # Check if the word is in the blacklist + if stripped_val in self.blacklist: + self.debug(f"Skipping adding [{stripped_val}] to wordlist because it was in the blacklist") else: - if not prefix or val.strip().lower().startswith(prefix.strip().lower()): - if not any(char in val.strip().lower() for char in self.banned_characters): + # Check if it starts with the given prefix (if any) + if not prefix or stripped_val.startswith(prefix): + # Check if it contains any banned characters + if not any(char in banned_set for char in stripped_val): line_count += 1 - virtual_file.append(f"{val.strip().lower()}") + 
virtual_file.append(stripped_val) + virtual_file.append(self.canary) return self.helpers.tempfile(virtual_file, pipe=False), line_count From 2e88ac4a3bf1e48358f8439cef869ec04ab1baa3 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Fri, 23 Aug 2024 20:48:55 -0400 Subject: [PATCH 075/124] fixing excavate header parameter test --- bbot/test/test_step_2/module_tests/test_module_excavate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 576e1de33..3279d3c5d 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -856,7 +856,7 @@ def check(self, module_test, events): class TestExcavateHeaders(ModuleTestBase): targets = ["http://127.0.0.1:8888/"] - modules_overrides = ["excavate", "httpx"] + modules_overrides = ["excavate", "httpx", "hunt"] config_overrides = {"web": {"spider_distance": 1, "spider_depth": 1}} async def setup_before_prep(self, module_test): From 0e33cd9587995123de930684b0764f4c52b7942b Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sat, 24 Aug 2024 08:59:09 -0400 Subject: [PATCH 076/124] variable cleanup --- bbot/modules/deadly/ffuf.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index e5ac0a0db..8995dd3dc 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -28,13 +28,13 @@ class ffuf(BaseModule): deps_common = ["ffuf"] - banned_characters = [" "] - + banned_characters = set([" "]) blacklist = ["images", "css", "image"] in_scope_only = True async def setup(self): + self.canary = "".join(random.choice(string.ascii_lowercase) for i in range(10)) wordlist_url = self.config.get("wordlist", "") self.debug(f"Using wordlist [{wordlist_url}]") @@ -318,7 +318,6 @@ def generate_templist(self, prefix=None): prefix = prefix.strip().lower() max_lines = self.config.get("lines") - banned_set = set(self.banned_characters) for idx, val in enumerate(self.wordlist_lines): if idx > max_lines: @@ -332,7 +331,7 @@ def generate_templist(self, prefix=None): # Check if it starts with the given prefix (if any) if not prefix or stripped_val.startswith(prefix): # Check if it contains any banned characters - if not any(char in banned_set for char in stripped_val): + if not any(char in self.banned_characters for char in stripped_val): line_count += 1 virtual_file.append(stripped_val) From 08ad1ba042ba85f375ecdee6b0334d15f065d217 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 04:19:55 +0000 Subject: [PATCH 077/124] Bump cloudcheck from 5.0.1.415 to 5.0.1.515 Bumps [cloudcheck](https://github.com/blacklanternsecurity/cloudcheck) from 5.0.1.415 to 5.0.1.515. - [Commits](https://github.com/blacklanternsecurity/cloudcheck/commits) --- updated-dependencies: - dependency-name: cloudcheck dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index e44387d8a..1debd9e87 100644 --- a/poetry.lock +++ b/poetry.lock @@ -387,13 +387,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cloudcheck" -version = "5.0.1.415" +version = "5.0.1.515" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "cloudcheck-5.0.1.415-py3-none-any.whl", hash = "sha256:e5f728106ddc2cdf43ee5a654d6ec069572ea925d30daec913c9a5a07209a52e"}, - {file = "cloudcheck-5.0.1.415.tar.gz", hash = "sha256:ef3f7351dde77c298d46d48dd69919c6c6d2563aeece46aa35ecd5281cbff0dd"}, + {file = "cloudcheck-5.0.1.515-py3-none-any.whl", hash = "sha256:427ee423b9abca9f742f21300c3968dde8784cbfdd99ba69b336a0a6723fe677"}, + {file = "cloudcheck-5.0.1.515.tar.gz", hash = "sha256:64c7c22567a3ae14731b4826c631585a9714858cc9dd70fafbfa40b5cef37049"}, ] [package.dependencies] From 45cf8a59962015be81d5c3980f131bd6c9afb0e4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 04:20:44 +0000 Subject: [PATCH 078/124] Bump pyzmq from 26.1.0 to 26.2.0 Bumps [pyzmq](https://github.com/zeromq/pyzmq) from 26.1.0 to 26.2.0. - [Release notes](https://github.com/zeromq/pyzmq/releases) - [Commits](https://github.com/zeromq/pyzmq/compare/v26.1.0...v26.2.0) --- updated-dependencies: - dependency-name: pyzmq dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 220 ++++++++++++++++++++++++++-------------------------- 1 file changed, 110 insertions(+), 110 deletions(-) diff --git a/poetry.lock b/poetry.lock index e44387d8a..23137f127 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2110,120 +2110,120 @@ pyyaml = "*" [[package]] name = "pyzmq" -version = "26.1.0" +version = "26.2.0" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" files = [ - {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:263cf1e36862310bf5becfbc488e18d5d698941858860c5a8c079d1511b3b18e"}, - {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d5c8b17f6e8f29138678834cf8518049e740385eb2dbf736e8f07fc6587ec682"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a95c2358fcfdef3374cb8baf57f1064d73246d55e41683aaffb6cfe6862917"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99de52b8fbdb2a8f5301ae5fc0f9e6b3ba30d1d5fc0421956967edcc6914242"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bcbfbab4e1895d58ab7da1b5ce9a327764f0366911ba5b95406c9104bceacb0"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77ce6a332c7e362cb59b63f5edf730e83590d0ab4e59c2aa5bd79419a42e3449"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba0a31d00e8616149a5ab440d058ec2da621e05d744914774c4dde6837e1f545"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b88641384e84a258b740801cd4dbc45c75f148ee674bec3149999adda4a8598"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2fa76ebcebe555cce90f16246edc3ad83ab65bb7b3d4ce408cf6bc67740c4f88"}, - {file = "pyzmq-26.1.0-cp310-cp310-win32.whl", hash = 
"sha256:fbf558551cf415586e91160d69ca6416f3fce0b86175b64e4293644a7416b81b"}, - {file = "pyzmq-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a7b8aab50e5a288c9724d260feae25eda69582be84e97c012c80e1a5e7e03fb2"}, - {file = "pyzmq-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:08f74904cb066e1178c1ec706dfdb5c6c680cd7a8ed9efebeac923d84c1f13b1"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:46d6800b45015f96b9d92ece229d92f2aef137d82906577d55fadeb9cf5fcb71"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bc2431167adc50ba42ea3e5e5f5cd70d93e18ab7b2f95e724dd8e1bd2c38120"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3bb34bebaa1b78e562931a1687ff663d298013f78f972a534f36c523311a84d"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3f6329340cef1c7ba9611bd038f2d523cea79f09f9c8f6b0553caba59ec562"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:471880c4c14e5a056a96cd224f5e71211997d40b4bf5e9fdded55dafab1f98f2"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ce6f2b66799971cbae5d6547acefa7231458289e0ad481d0be0740535da38d8b"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a1f6ea5b1d6cdbb8cfa0536f0d470f12b4b41ad83625012e575f0e3ecfe97f0"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b45e6445ac95ecb7d728604bae6538f40ccf4449b132b5428c09918523abc96d"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:94c4262626424683feea0f3c34951d39d49d354722db2745c42aa6bb50ecd93b"}, - {file = "pyzmq-26.1.0-cp311-cp311-win32.whl", hash = "sha256:a0f0ab9df66eb34d58205913f4540e2ad17a175b05d81b0b7197bc57d000e829"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8efb782f5a6c450589dbab4cb0f66f3a9026286333fe8f3a084399149af52f29"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f133d05aaf623519f45e16ab77526e1e70d4e1308e084c2fb4cedb1a0c764bbb"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3d3146b1c3dcc8a1539e7cc094700b2be1e605a76f7c8f0979b6d3bde5ad4072"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d9270fbf038bf34ffca4855bcda6e082e2c7f906b9eb8d9a8ce82691166060f7"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995301f6740a421afc863a713fe62c0aaf564708d4aa057dfdf0f0f56525294b"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7eca8b89e56fb8c6c26dd3e09bd41b24789022acf1cf13358e96f1cafd8cae3"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d4feb2e83dfe9ace6374a847e98ee9d1246ebadcc0cb765482e272c34e5820"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d4fafc2eb5d83f4647331267808c7e0c5722c25a729a614dc2b90479cafa78bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58c33dc0e185dd97a9ac0288b3188d1be12b756eda67490e6ed6a75cf9491d79"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:68a0a1d83d33d8367ddddb3e6bb4afbb0f92bd1dac2c72cd5e5ddc86bdafd3eb"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ae7c57e22ad881af78075e0cea10a4c778e67234adc65c404391b417a4dda83"}, - {file = "pyzmq-26.1.0-cp312-cp312-win32.whl", 
hash = "sha256:347e84fc88cc4cb646597f6d3a7ea0998f887ee8dc31c08587e9c3fd7b5ccef3"}, - {file = "pyzmq-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:9f136a6e964830230912f75b5a116a21fe8e34128dcfd82285aa0ef07cb2c7bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4b7a989c8f5a72ab1b2bbfa58105578753ae77b71ba33e7383a31ff75a504c4"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d416f2088ac8f12daacffbc2e8918ef4d6be8568e9d7155c83b7cebed49d2322"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:ecb6c88d7946166d783a635efc89f9a1ff11c33d680a20df9657b6902a1d133b"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:471312a7375571857a089342beccc1a63584315188560c7c0da7e0a23afd8a5c"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6cea102ffa16b737d11932c426f1dc14b5938cf7bc12e17269559c458ac334"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec7248673ffc7104b54e4957cee38b2f3075a13442348c8d651777bf41aa45ee"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0614aed6f87d550b5cecb03d795f4ddbb1544b78d02a4bd5eecf644ec98a39f6"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e8746ce968be22a8a1801bf4a23e565f9687088580c3ed07af5846580dd97f76"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:7688653574392d2eaeef75ddcd0b2de5b232d8730af29af56c5adf1df9ef8d6f"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8d4dac7d97f15c653a5fedcafa82626bd6cee1450ccdaf84ffed7ea14f2b07a4"}, - {file = "pyzmq-26.1.0-cp313-cp313-win32.whl", hash = "sha256:ccb42ca0a4a46232d716779421bbebbcad23c08d37c980f02cc3a6bd115ad277"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e1e5d0a25aea8b691a00d6b54b28ac514c8cc0d8646d05f7ca6cb64b97358250"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:fc82269d24860cfa859b676d18850cbb8e312dcd7eada09e7d5b007e2f3d9eb1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:416ac51cabd54f587995c2b05421324700b22e98d3d0aa2cfaec985524d16f1d"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:ff832cce719edd11266ca32bc74a626b814fff236824aa1aeaad399b69fe6eae"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:393daac1bcf81b2a23e696b7b638eedc965e9e3d2112961a072b6cd8179ad2eb"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9869fa984c8670c8ab899a719eb7b516860a29bc26300a84d24d8c1b71eae3ec"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b3b8e36fd4c32c0825b4461372949ecd1585d326802b1321f8b6dc1d7e9318c"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3ee647d84b83509b7271457bb428cc347037f437ead4b0b6e43b5eba35fec0aa"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:45cb1a70eb00405ce3893041099655265fabcd9c4e1e50c330026e82257892c1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:5cca7b4adb86d7470e0fc96037771981d740f0b4cb99776d5cb59cd0e6684a73"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:91d1a20bdaf3b25f3173ff44e54b1cfbc05f94c9e8133314eb2962a89e05d6e3"}, - {file = 
"pyzmq-26.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c0665d85535192098420428c779361b8823d3d7ec4848c6af3abb93bc5c915bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:96d7c1d35ee4a495df56c50c83df7af1c9688cce2e9e0edffdbf50889c167595"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b281b5ff5fcc9dcbfe941ac5c7fcd4b6c065adad12d850f95c9d6f23c2652384"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5384c527a9a004445c5074f1e20db83086c8ff1682a626676229aafd9cf9f7d1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:754c99a9840839375ee251b38ac5964c0f369306eddb56804a073b6efdc0cd88"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9bdfcb74b469b592972ed881bad57d22e2c0acc89f5e8c146782d0d90fb9f4bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bd13f0231f4788db619347b971ca5f319c5b7ebee151afc7c14632068c6261d3"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win32.whl", hash = "sha256:c5668dac86a869349828db5fc928ee3f58d450dce2c85607067d581f745e4fb1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad875277844cfaeca7fe299ddf8c8d8bfe271c3dc1caf14d454faa5cdbf2fa7a"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:65c6e03cc0222eaf6aad57ff4ecc0a070451e23232bb48db4322cc45602cede0"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:038ae4ffb63e3991f386e7fda85a9baab7d6617fe85b74a8f9cab190d73adb2b"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bdeb2c61611293f64ac1073f4bf6723b67d291905308a7de9bb2ca87464e3273"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:61dfa5ee9d7df297c859ac82b1226d8fefaf9c5113dc25c2c00ecad6feeeb04f"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3292d384537b9918010769b82ab3e79fca8b23d74f56fc69a679106a3e2c2cf"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f9499c70c19ff0fbe1007043acb5ad15c1dec7d8e84ab429bca8c87138e8f85c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d3dd5523ed258ad58fed7e364c92a9360d1af8a9371e0822bd0146bdf017ef4c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baba2fd199b098c5544ef2536b2499d2e2155392973ad32687024bd8572a7d1c"}, - {file = "pyzmq-26.1.0-cp38-cp38-win32.whl", hash = "sha256:ddbb2b386128d8eca92bd9ca74e80f73fe263bcca7aa419f5b4cbc1661e19741"}, - {file = "pyzmq-26.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:79e45a4096ec8388cdeb04a9fa5e9371583bcb826964d55b8b66cbffe7b33c86"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:add52c78a12196bc0fda2de087ba6c876ea677cbda2e3eba63546b26e8bf177b"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c03bd7f3339ff47de7ea9ac94a2b34580a8d4df69b50128bb6669e1191a895"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dcc37d9d708784726fafc9c5e1232de655a009dbf97946f117aefa38d5985a0f"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a6ed52f0b9bf8dcc64cc82cce0607a3dfed1dbb7e8c6f282adfccc7be9781de"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:451e16ae8bea3d95649317b463c9f95cd9022641ec884e3d63fc67841ae86dfe"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:906e532c814e1d579138177a00ae835cd6becbf104d45ed9093a3aaf658f6a6a"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05bacc4f94af468cc82808ae3293390278d5f3375bb20fef21e2034bb9a505b6"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:57bb2acba798dc3740e913ffadd56b1fcef96f111e66f09e2a8db3050f1f12c8"}, - {file = "pyzmq-26.1.0-cp39-cp39-win32.whl", hash = "sha256:f774841bb0e8588505002962c02da420bcfb4c5056e87a139c6e45e745c0e2e2"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:359c533bedc62c56415a1f5fcfd8279bc93453afdb0803307375ecf81c962402"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:7907419d150b19962138ecec81a17d4892ea440c184949dc29b358bc730caf69"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b24079a14c9596846bf7516fe75d1e2188d4a528364494859106a33d8b48be38"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59d0acd2976e1064f1b398a00e2c3e77ed0a157529779e23087d4c2fb8aaa416"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:911c43a4117915203c4cc8755e0f888e16c4676a82f61caee2f21b0c00e5b894"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10163e586cc609f5f85c9b233195554d77b1e9a0801388907441aaeb22841c5"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:28a8b2abb76042f5fd7bd720f7fea48c0fd3e82e9de0a1bf2c0de3812ce44a42"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bef24d3e4ae2c985034439f449e3f9e06bf579974ce0e53d8a507a1577d5b2ab"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2cd0f4d314f4a2518e8970b6f299ae18cff7c44d4a1fc06fc713f791c3a9e3ea"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa25a620eed2a419acc2cf10135b995f8f0ce78ad00534d729aa761e4adcef8a"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef3b048822dca6d231d8a8ba21069844ae38f5d83889b9b690bf17d2acc7d099"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:9a6847c92d9851b59b9f33f968c68e9e441f9a0f8fc972c5580c5cd7cbc6ee24"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9b9305004d7e4e6a824f4f19b6d8f32b3578aad6f19fc1122aaf320cbe3dc83"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:63c1d3a65acb2f9c92dce03c4e1758cc552f1ae5c78d79a44e3bb88d2fa71f3a"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d36b8fffe8b248a1b961c86fbdfa0129dfce878731d169ede7fa2631447331be"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67976d12ebfd61a3bc7d77b71a9589b4d61d0422282596cf58c62c3866916544"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:998444debc8816b5d8d15f966e42751032d0f4c55300c48cc337f2b3e4f17d03"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5c88b2f13bcf55fee78ea83567b9fe079ba1a4bef8b35c376043440040f7edb"}, - {file = 
"pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d906d43e1592be4b25a587b7d96527cb67277542a5611e8ea9e996182fae410"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b0c9942430d731c786545da6be96d824a41a51742e3e374fedd9018ea43106"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:314d11564c00b77f6224d12eb3ddebe926c301e86b648a1835c5b28176c83eab"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:093a1a3cae2496233f14b57f4b485da01b4ff764582c854c0f42c6dd2be37f3d"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3c397b1b450f749a7e974d74c06d69bd22dd362142f370ef2bd32a684d6b480c"}, - {file = "pyzmq-26.1.0.tar.gz", hash = "sha256:6c5aeea71f018ebd3b9115c7cb13863dd850e98ca6b9258509de1246461a7e7f"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, + {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", 
hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, + {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, + {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, + {file = 
"pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, + {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, + {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = 
"sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, + {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, + {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, + {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, + {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, ] [package.dependencies] From 74de67a733ada2020f82e214c8013b0bca392778 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 04:21:04 +0000 Subject: [PATCH 079/124] Bump pytest-asyncio from 0.23.8 to 0.24.0 Bumps 
[pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.23.8 to 0.24.0. - [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.23.8...v0.24.0) --- updated-dependencies: - dependency-name: pytest-asyncio dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index e44387d8a..a447f095c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1887,17 +1887,17 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-asyncio" -version = "0.23.8" +version = "0.24.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, - {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, ] [package.dependencies] -pytest = ">=7.0.0,<9" +pytest = ">=8.2,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] @@ -2973,4 +2973,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "d7c83d3aede8138e801e2936d5d63b5d60f4f9ab630581a1b0831c3cc9190bcc" +content-hash = "d5a58b845248d60d5cfc5111d6e611486e9137479952f180e2e01d719e440746" diff --git a/pyproject.toml b/pyproject.toml index feac446ec..18fae5509 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ pytest-timeout = "^2.3.1" pytest-httpx = "^0.30.0" pytest-httpserver = "^1.0.11" pytest = "^8.3.1" -pytest-asyncio = "^0.23.8" +pytest-asyncio = ">=0.23.8,<0.25.0" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" From 0749c15e066a4884d170a6ad800e08cc3cbcfc4e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 04:21:25 +0000 Subject: [PATCH 080/124] Bump idna from 3.7 to 3.8 Bumps [idna](https://github.com/kjd/idna) from 3.7 to 3.8. - [Release notes](https://github.com/kjd/idna/releases) - [Changelog](https://github.com/kjd/idna/blob/master/HISTORY.rst) - [Commits](https://github.com/kjd/idna/compare/v3.7...v3.8) --- updated-dependencies: - dependency-name: idna dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index e44387d8a..09fd51438 100644 --- a/poetry.lock +++ b/poetry.lock @@ -757,13 +757,13 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] From 5bac76a7407c1651847c2ca67cf2cff124f373c0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 04:21:44 +0000 Subject: [PATCH 081/124] Bump werkzeug from 3.0.3 to 3.0.4 Bumps [werkzeug](https://github.com/pallets/werkzeug) from 3.0.3 to 3.0.4. - [Release notes](https://github.com/pallets/werkzeug/releases) - [Changelog](https://github.com/pallets/werkzeug/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/werkzeug/compare/3.0.3...3.0.4) --- updated-dependencies: - dependency-name: werkzeug dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index e44387d8a..1e06519aa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2816,13 +2816,13 @@ files = [ [[package]] name = "werkzeug" -version = "3.0.3" +version = "3.0.4" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, + {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, + {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, ] [package.dependencies] From 54a499f22f0be42d51e53ff62c8951855dc1d02b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 04:22:01 +0000 Subject: [PATCH 082/124] Bump mkdocs-material from 9.5.31 to 9.5.33 Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.31 to 9.5.33. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.31...9.5.33) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index e44387d8a..fc978841d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1239,13 +1239,13 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.31" +version = "9.5.33" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.31-py3-none-any.whl", hash = "sha256:1b1f49066fdb3824c1e96d6bacd2d4375de4ac74580b47e79ff44c4d835c5fcb"}, - {file = "mkdocs_material-9.5.31.tar.gz", hash = "sha256:31833ec664772669f5856f4f276bf3fdf0e642a445e64491eda459249c3a1ca8"}, + {file = "mkdocs_material-9.5.33-py3-none-any.whl", hash = "sha256:dbc79cf0fdc6e2c366aa987de8b0c9d4e2bb9f156e7466786ba2fd0f9bf7ffca"}, + {file = "mkdocs_material-9.5.33.tar.gz", hash = "sha256:d23a8b5e3243c9b2f29cdfe83051104a8024b767312dc8fde05ebe91ad55d89d"}, ] [package.dependencies] From 9522dd21c247bc48bfa5f27e15ffffcae820cf4e Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 18 Aug 2024 21:56:50 -0400 Subject: [PATCH 083/124] better engine debugging --- bbot/core/engine.py | 52 +++++++++++++++++++++++++-------------------- 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/bbot/core/engine.py b/bbot/core/engine.py index d5fb2ec13..c1f5b3596 100644 --- a/bbot/core/engine.py +++ b/bbot/core/engine.py @@ -138,9 +138,11 @@ def __init__(self, debug=False, **kwargs): def check_error(self, message): if isinstance(message, dict) and len(message) == 1 and "_e" in message: + self.debug(f"{self.name}: got error message: {message}") error, trace = message["_e"] error = self.ERROR_CLASS(error) error.engine_traceback = trace + self.debug(f"{self.name}: raising {error.__class__.__name__}") raise error return False @@ -404,21 +406,22 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs): with self.client_id_context(client_id): try: self.debug(f"{self.name}: run-and-return {fn_str}") - result = error_sentinel try: result = await command_fn(*args, **kwargs) except BaseException as e: - if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): - error = f"Error in {self.name}.{fn_str}: {e}" - self.debug(error) - trace = traceback.format_exc() - self.debug(trace) - result = {"_e": (error, trace)} + if in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): + log_fn = self.log.debug + else: + log_fn = self.log.error + error = f"Error in {self.name} {fn_str}: {e}" + trace = traceback.format_exc() + log_fn(error) + self.log.trace(trace) + result = {"_e": (error, trace)} finally: self.tasks.pop(client_id, None) - if result is not error_sentinel: - self.debug(f"{self.name}: Sending response to {fn_str}: {result}") - await self.send_socket_multipart(client_id, result) + self.debug(f"{self.name}: Sending response to {fn_str}: {result}") + await self.send_socket_multipart(client_id, result) except BaseException as e: self.log.critical( f"Unhandled exception in {self.name}.run_and_return({client_id}, {command_fn}, {args}, {kwargs}): {e}" @@ -437,13 +440,16 @@ async def run_and_yield(self, client_id, command_fn, *args, **kwargs): self.debug(f"{self.name}: sending iteration for {command_fn.__name__}(): {_}") await self.send_socket_multipart(client_id, _) except BaseException as e: - if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): - error = f"Error in {self.name}.{fn_str}: {e}" - trace = traceback.format_exc() - self.debug(error) - 
self.debug(trace) - result = {"_e": (error, trace)} - await self.send_socket_multipart(client_id, result) + if in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): + log_fn = self.log.debug + else: + log_fn = self.log.error + error = f"Error in {self.name} {fn_str}: {e}" + trace = traceback.format_exc() + log_fn(error) + self.log.trace(trace) + result = {"_e": (error, trace)} + await self.send_socket_multipart(client_id, result) finally: self.debug(f"{self.name} reached end of run-and-yield iteration for {command_fn.__name__}()") # _s == special signal that means StopIteration @@ -475,7 +481,7 @@ async def worker(self): while 1: client_id, binary = await self.socket.recv_multipart() message = self.unpickle(binary) - # self.log.debug(f"{self.name} got message: {message}") + self.debug(f"{self.name} got message: {message}") if self.check_error(message): continue @@ -493,7 +499,7 @@ async def worker(self): # -99 == shutdown task if cmd == -99: - self.debug(f"{self.name} got shutdown signal") + self.log.verbose(f"{self.name} got shutdown signal") await self.send_socket_multipart(client_id, {"m": "SHUTDOWN_OK"}) await self._shutdown() return @@ -515,16 +521,16 @@ async def worker(self): continue if inspect.isasyncgenfunction(command_fn): - # self.log.debug(f"{self.name}: creating run-and-yield coroutine for {command_name}()") + self.debug(f"{self.name}: creating run-and-yield coroutine for {command_name}()") coroutine = self.run_and_yield(client_id, command_fn, *args, **kwargs) else: - # self.log.debug(f"{self.name}: creating run-and-return coroutine for {command_name}()") + self.debug(f"{self.name}: creating run-and-return coroutine for {command_name}()") coroutine = self.run_and_return(client_id, command_fn, *args, **kwargs) - # self.log.debug(f"{self.name}: creating task for {command_name}() coroutine") + self.debug(f"{self.name}: creating task for {command_name}() coroutine") task = asyncio.create_task(coroutine) self.tasks[client_id] = task, command_fn, args, kwargs - # self.log.debug(f"{self.name}: finished creating task for {command_name}() coroutine") + self.debug(f"{self.name}: finished creating task for {command_name}() coroutine") except BaseException as e: await self._shutdown() if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): From 063d0277f029a94e7e526b6b2538d74f1d5aea79 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 18 Aug 2024 22:08:01 -0400 Subject: [PATCH 084/124] more debugging --- bbot/core/engine.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/bbot/core/engine.py b/bbot/core/engine.py index c1f5b3596..3b405b4de 100644 --- a/bbot/core/engine.py +++ b/bbot/core/engine.py @@ -405,7 +405,7 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs): fn_str = f"{command_fn.__name__}({args}, {kwargs})" with self.client_id_context(client_id): try: - self.debug(f"{self.name}: run-and-return {fn_str}") + self.debug(f"{self.name}: Starting run-and-return {fn_str}") try: result = await command_fn(*args, **kwargs) except BaseException as e: @@ -413,7 +413,7 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs): log_fn = self.log.debug else: log_fn = self.log.error - error = f"Error in {self.name} {fn_str}: {e}" + error = f"{self.name}: Error in {fn_str}: {e}" trace = traceback.format_exc() log_fn(error) self.log.trace(trace) @@ -428,30 +428,30 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs): ) self.log.critical(traceback.format_exc()) 
            finally:
-                self.debug(f"{self.name} finished run-and-return {command_fn.__name__}({args}, {kwargs})")
+                self.debug(f"{self.name} finished run-and-return {fn_str}")

     async def run_and_yield(self, client_id, command_fn, *args, **kwargs):
         fn_str = f"{command_fn.__name__}({args}, {kwargs})"
         with self.client_id_context(client_id):
             try:
-                self.debug(f"{self.name}: run-and-yield {fn_str}")
+                self.debug(f"{self.name}: Starting run-and-yield {fn_str}")
                 try:
                     async for _ in command_fn(*args, **kwargs):
-                        self.debug(f"{self.name}: sending iteration for {command_fn.__name__}(): {_}")
+                        self.debug(f"{self.name}: Sending iteration for {fn_str}: {_}")
                         await self.send_socket_multipart(client_id, _)
                 except BaseException as e:
                     if in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
                         log_fn = self.log.debug
                     else:
                         log_fn = self.log.error
-                    error = f"Error in {self.name} {fn_str}: {e}"
+                    error = f"{self.name}: Error in {fn_str}: {e}"
                     trace = traceback.format_exc()
                     log_fn(error)
                     self.log.trace(trace)
                     result = {"_e": (error, trace)}
                     await self.send_socket_multipart(client_id, result)
             finally:
-                self.debug(f"{self.name} reached end of run-and-yield iteration for {command_fn.__name__}()")
+                self.debug(f"{self.name}: Reached end of run-and-yield iteration for {fn_str}")
                 # _s == special signal that means StopIteration
                 await self.send_socket_multipart(client_id, {"_s": None})
                 self.tasks.pop(client_id, None)
@@ -461,7 +461,7 @@ async def run_and_yield(self, client_id, command_fn, *args, **kwargs):
                 )
                 self.log.critical(traceback.format_exc())
             finally:
-                self.debug(f"{self.name} finished run-and-yield {command_fn.__name__}()")
+                self.debug(f"{self.name}: Finished run-and-yield {fn_str}")

From be2ae32fe3198d4f670e088f374233ff146c1c4c Mon Sep 17 00:00:00 2001
From: github-actions
Date: Mon, 19 Aug 2024 00:47:40 -0400
Subject: [PATCH 085/124] high water marks

---
 bbot/core/engine.py | 27 +++++++++++++------------
 1 file changed, 15 insertions(+), 12 deletions(-)

diff --git a/bbot/core/engine.py b/bbot/core/engine.py
index 3b405b4de..4e65a0456 100644
--- a/bbot/core/engine.py
+++ b/bbot/core/engine.py
@@ -316,7 +316,9 @@ async def new_socket(self):
             self.debug(f"{self.name}: waiting for server process to start...")
             await asyncio.sleep(0.1)
         socket = self.context.socket(zmq.DEALER)
-        socket.setsockopt(zmq.LINGER, 0)
+        socket.setsockopt(zmq.LINGER, 0)  # Discard pending messages immediately disconnect() or close()
+        socket.setsockopt(zmq.SNDHWM, 0)  # Unlimited send buffer
+        socket.setsockopt(zmq.RCVHWM, 0)  # Unlimited receive buffer
         socket.connect(f"ipc://{self.socket_path}")
         self.sockets.add(socket)
         try:
@@ -386,10 +388,11 @@ def __init__(self, socket_path, debug=False):
         if self.socket_path is not None:
             # create ZeroMQ context
             self.context = zmq.asyncio.Context()
-            self.context.setsockopt(zmq.LINGER, 0)
             # ROUTER socket can handle multiple concurrent requests
             self.socket = self.context.socket(zmq.ROUTER)
-            self.socket.setsockopt(zmq.LINGER, 0)
+            self.socket.setsockopt(zmq.LINGER, 0)  # Discard pending messages immediately disconnect() or close()
+            self.socket.setsockopt(zmq.SNDHWM, 0)  # Unlimited send buffer
+            self.socket.setsockopt(zmq.RCVHWM, 0)  # Unlimited receive buffer
             # create socket file
             self.socket.bind(f"ipc://{self.socket_path}")
@@ -405,7 +408,7 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs):
         fn_str = f"{command_fn.__name__}({args}, {kwargs})"
         with self.client_id_context(client_id):
             try:
-                self.debug(f"{self.name}: 
Starting run-and-return {fn_str}") + self.debug(f"{self.name}: starting run-and-return {fn_str}") try: result = await command_fn(*args, **kwargs) except BaseException as e: @@ -413,14 +416,14 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs): log_fn = self.log.debug else: log_fn = self.log.error - error = f"{self.name}: Error in {fn_str}: {e}" + error = f"{self.name}: error in {fn_str}: {e}" trace = traceback.format_exc() log_fn(error) self.log.trace(trace) result = {"_e": (error, trace)} finally: self.tasks.pop(client_id, None) - self.debug(f"{self.name}: Sending response to {fn_str}: {result}") + self.debug(f"{self.name}: sending response to {fn_str}: {result}") await self.send_socket_multipart(client_id, result) except BaseException as e: self.log.critical( @@ -434,24 +437,24 @@ async def run_and_yield(self, client_id, command_fn, *args, **kwargs): fn_str = f"{command_fn.__name__}({args}, {kwargs})" with self.client_id_context(client_id): try: - self.debug(f"{self.name}: Starting run-and-yield {fn_str}") + self.debug(f"{self.name}: starting run-and-yield {fn_str}") try: async for _ in command_fn(*args, **kwargs): - self.debug(f"{self.name}: Sending iteration for {fn_str}: {_}") + self.debug(f"{self.name}: sending iteration for {fn_str}: {_}") await self.send_socket_multipart(client_id, _) except BaseException as e: if in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): log_fn = self.log.debug else: log_fn = self.log.error - error = f"{self.name}: Error in {fn_str}: {e}" + error = f"{self.name}: error in {fn_str}: {e}" trace = traceback.format_exc() log_fn(error) self.log.trace(trace) result = {"_e": (error, trace)} await self.send_socket_multipart(client_id, result) finally: - self.debug(f"{self.name}: Reached end of run-and-yield iteration for {fn_str}") + self.debug(f"{self.name}: reached end of run-and-yield iteration for {fn_str}") # _s == special signal that means StopIteration await self.send_socket_multipart(client_id, {"_s": None}) self.tasks.pop(client_id, None) @@ -461,14 +464,14 @@ async def run_and_yield(self, client_id, command_fn, *args, **kwargs): ) self.log.critical(traceback.format_exc()) finally: - self.debug(f"{self.name}: Finished run-and-yield {fn_str}") + self.debug(f"{self.name}: finished run-and-yield {fn_str}") async def send_socket_multipart(self, client_id, message): try: message = pickle.dumps(message) await self._infinite_retry(self.socket.send_multipart, [client_id, message]) except Exception as e: - self.log.verbose(f"Error sending ZMQ message: {e}") + self.log.verbose(f"{self.name}: error sending ZMQ message: {e}") self.log.trace(traceback.format_exc()) def check_error(self, message): From be9179ad0de405e424f871f6163e87458b6cc27d Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 19 Aug 2024 01:50:34 -0400 Subject: [PATCH 086/124] better engine debug --- bbot/core/engine.py | 64 ++++++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 30 deletions(-) diff --git a/bbot/core/engine.py b/bbot/core/engine.py index 4e65a0456..7773f0513 100644 --- a/bbot/core/engine.py +++ b/bbot/core/engine.py @@ -43,7 +43,7 @@ class EngineBase: def __init__(self, debug=False): self._shutdown_status = False self.log = logging.getLogger(f"bbot.core.{self.__class__.__name__.lower()}") - self._debug = debug + self._engine_debug = debug def pickle(self, obj): try: @@ -79,8 +79,8 @@ async def _infinite_retry(self, callback, *args, **kwargs): if max_retries is not None and retries > max_retries: raise 
TimeoutError(f"Timed out after {max_retries*interval:,} seconds {context}") - def debug(self, *args, **kwargs): - if self._debug: + def engine_debug(self, *args, **kwargs): + if self._engine_debug: self.log.debug(*args, **kwargs) @@ -138,17 +138,17 @@ def __init__(self, debug=False, **kwargs): def check_error(self, message): if isinstance(message, dict) and len(message) == 1 and "_e" in message: - self.debug(f"{self.name}: got error message: {message}") + self.engine_debug(f"{self.name}: got error message: {message}") error, trace = message["_e"] error = self.ERROR_CLASS(error) error.engine_traceback = trace - self.debug(f"{self.name}: raising {error.__class__.__name__}") + self.engine_debug(f"{self.name}: raising {error.__class__.__name__}") raise error return False async def run_and_return(self, command, *args, **kwargs): fn_str = f"{command}({args}, {kwargs})" - self.debug(f"{self.name}: executing run-and-return {fn_str}") + self.engine_debug(f"{self.name}: executing run-and-return {fn_str}") if self._shutdown_status and not command == "_shutdown": self.log.verbose(f"{self.name} has been shut down and is not accepting new tasks") return @@ -168,7 +168,7 @@ async def run_and_return(self, command, *args, **kwargs): raise # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}") message = self.unpickle(binary) - self.debug(f"{self.name}: {fn_str} got return value: {message}") + self.engine_debug(f"{self.name}: {fn_str} got return value: {message}") # error handling if self.check_error(message): return @@ -176,7 +176,7 @@ async def run_and_return(self, command, *args, **kwargs): async def run_and_yield(self, command, *args, **kwargs): fn_str = f"{command}({args}, {kwargs})" - self.debug(f"{self.name}: executing run-and-yield {fn_str}") + self.engine_debug(f"{self.name}: executing run-and-yield {fn_str}") if self._shutdown_status: self.log.verbose("Engine has been shut down and is not accepting new tasks") return @@ -195,18 +195,18 @@ async def run_and_yield(self, command, *args, **kwargs): ) # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}") message = self.unpickle(binary) - self.debug(f"{self.name}: {fn_str} got iteration: {message}") + self.engine_debug(f"{self.name}: {fn_str} got iteration: {message}") # error handling if self.check_error(message) or self.check_stop(message): break yield message except (StopAsyncIteration, GeneratorExit) as e: exc_name = e.__class__.__name__ - self.debug(f"{self.name}.{command} got {exc_name}") + self.engine_debug(f"{self.name}.{command} got {exc_name}") try: await self.send_cancel_message(socket, fn_str) except Exception: - self.debug(f"{self.name}.{command} failed to send cancel message after {exc_name}") + self.engine_debug(f"{self.name}.{command} failed to send cancel message after {exc_name}") self.log.trace(traceback.format_exc()) break @@ -273,7 +273,7 @@ def start_server(self): # this allows us to more easily mock http, etc. 
if os.environ.get("BBOT_TESTING", "") == "True": kwargs["_loop"] = get_event_loop() - kwargs["debug"] = self._debug + kwargs["debug"] = self._engine_debug self.process = CORE.create_process( target=self.server_process, args=( @@ -313,7 +313,7 @@ async def new_socket(self): if self._server_process is None: self._server_process = self.start_server() while not self.socket_path.exists(): - self.debug(f"{self.name}: waiting for server process to start...") + self.engine_debug(f"{self.name}: waiting for server process to start...") await asyncio.sleep(0.1) socket = self.context.socket(zmq.DEALER) socket.setsockopt(zmq.LINGER, 0) # Discard pending messages immediately disconnect() or close() @@ -379,12 +379,14 @@ class EngineServer(EngineBase): def __init__(self, socket_path, debug=False): self.name = f"EngineServer {self.__class__.__name__}" super().__init__(debug=debug) + self.engine_debug(f"{self.name}: finished setup 1 (_debug={self._engine_debug})") self.socket_path = socket_path self.client_id_var = contextvars.ContextVar("client_id", default=None) # task <--> client id mapping self.tasks = {} # child tasks spawned by main tasks self.child_tasks = {} + self.engine_debug(f"{self.name}: finished setup 2 (_debug={self._engine_debug})") if self.socket_path is not None: # create ZeroMQ context self.context = zmq.asyncio.Context() @@ -395,6 +397,7 @@ def __init__(self, socket_path, debug=False): self.socket.setsockopt(zmq.RCVHWM, 0) # Unlimited receive buffer # create socket file self.socket.bind(f"ipc://{self.socket_path}") + self.engine_debug(f"{self.name}: finished setup 3 (_debug={self._engine_debug})") @contextlib.contextmanager def client_id_context(self, value): @@ -406,9 +409,10 @@ def client_id_context(self, value): async def run_and_return(self, client_id, command_fn, *args, **kwargs): fn_str = f"{command_fn.__name__}({args}, {kwargs})" + self.engine_debug(fn_str) with self.client_id_context(client_id): try: - self.debug(f"{self.name}: starting run-and-return {fn_str}") + self.engine_debug(f"{self.name}: starting run-and-return {fn_str}") try: result = await command_fn(*args, **kwargs) except BaseException as e: @@ -423,7 +427,7 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs): result = {"_e": (error, trace)} finally: self.tasks.pop(client_id, None) - self.debug(f"{self.name}: sending response to {fn_str}: {result}") + self.engine_debug(f"{self.name}: sending response to {fn_str}: {result}") await self.send_socket_multipart(client_id, result) except BaseException as e: self.log.critical( @@ -431,16 +435,16 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs): ) self.log.critical(traceback.format_exc()) finally: - self.debug(f"{self.name} finished run-and-return {fn_str}") + self.engine_debug(f"{self.name} finished run-and-return {fn_str}") async def run_and_yield(self, client_id, command_fn, *args, **kwargs): fn_str = f"{command_fn.__name__}({args}, {kwargs})" with self.client_id_context(client_id): try: - self.debug(f"{self.name}: starting run-and-yield {fn_str}") + self.engine_debug(f"{self.name}: starting run-and-yield {fn_str}") try: async for _ in command_fn(*args, **kwargs): - self.debug(f"{self.name}: sending iteration for {fn_str}: {_}") + self.engine_debug(f"{self.name}: sending iteration for {fn_str}: {_}") await self.send_socket_multipart(client_id, _) except BaseException as e: if in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): @@ -454,7 +458,7 @@ async def run_and_yield(self, client_id, command_fn, *args, 
**kwargs): result = {"_e": (error, trace)} await self.send_socket_multipart(client_id, result) finally: - self.debug(f"{self.name}: reached end of run-and-yield iteration for {fn_str}") + self.engine_debug(f"{self.name}: reached end of run-and-yield iteration for {fn_str}") # _s == special signal that means StopIteration await self.send_socket_multipart(client_id, {"_s": None}) self.tasks.pop(client_id, None) @@ -464,7 +468,7 @@ async def run_and_yield(self, client_id, command_fn, *args, **kwargs): ) self.log.critical(traceback.format_exc()) finally: - self.debug(f"{self.name}: finished run-and-yield {fn_str}") + self.engine_debug(f"{self.name}: finished run-and-yield {fn_str}") async def send_socket_multipart(self, client_id, message): try: @@ -479,12 +483,12 @@ def check_error(self, message): return True async def worker(self): - self.debug(f"{self.name}: starting worker") + self.engine_debug(f"{self.name}: starting worker") try: while 1: client_id, binary = await self.socket.recv_multipart() message = self.unpickle(binary) - self.debug(f"{self.name} got message: {message}") + self.engine_debug(f"{self.name} got message: {message}") if self.check_error(message): continue @@ -495,7 +499,7 @@ async def worker(self): # -1 == cancel task if cmd == -1: - self.debug(f"{self.name} got cancel signal") + self.engine_debug(f"{self.name} got cancel signal") await self.send_socket_multipart(client_id, {"m": "CANCEL_OK"}) await self.cancel_task(client_id) continue @@ -524,23 +528,23 @@ async def worker(self): continue if inspect.isasyncgenfunction(command_fn): - self.debug(f"{self.name}: creating run-and-yield coroutine for {command_name}()") + self.engine_debug(f"{self.name}: creating run-and-yield coroutine for {command_name}()") coroutine = self.run_and_yield(client_id, command_fn, *args, **kwargs) else: - self.debug(f"{self.name}: creating run-and-return coroutine for {command_name}()") + self.engine_debug(f"{self.name}: creating run-and-return coroutine for {command_name}()") coroutine = self.run_and_return(client_id, command_fn, *args, **kwargs) - self.debug(f"{self.name}: creating task for {command_name}() coroutine") + self.engine_debug(f"{self.name}: creating task for {command_name}() coroutine") task = asyncio.create_task(coroutine) self.tasks[client_id] = task, command_fn, args, kwargs - self.debug(f"{self.name}: finished creating task for {command_name}() coroutine") + self.engine_debug(f"{self.name}: finished creating task for {command_name}() coroutine") except BaseException as e: await self._shutdown() if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): self.log.error(f"{self.name}: error in EngineServer worker: {e}") self.log.trace(traceback.format_exc()) finally: - self.debug(f"{self.name}: finished worker()") + self.engine_debug(f"{self.name}: finished worker()") async def _shutdown(self): if not self._shutdown_status: @@ -588,11 +592,11 @@ async def cancel_task(self, client_id): if parent_task is None: return parent_task, _cmd, _args, _kwargs = parent_task - self.debug(f"{self.name}: Cancelling client id {client_id} (task: {parent_task})") + self.engine_debug(f"{self.name}: Cancelling client id {client_id} (task: {parent_task})") parent_task.cancel() child_tasks = self.child_tasks.pop(client_id, set()) if child_tasks: - self.debug(f"{self.name}: Cancelling {len(child_tasks):,} child tasks for client id {client_id}") + self.engine_debug(f"{self.name}: Cancelling {len(child_tasks):,} child tasks for client id {client_id}") for child_task in child_tasks: 
child_task.cancel() From 4ac5a72bbc5f322575fe78c87adc29f423bdacc0 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 19 Aug 2024 01:53:15 -0400 Subject: [PATCH 087/124] fix queue draining bug --- bbot/scanner/scanner.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 4e1d5a104..1ec21d5c3 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -740,11 +740,11 @@ def _drain_queues(self): for module in self.modules.values(): with contextlib.suppress(asyncio.queues.QueueEmpty): while 1: - if module.incoming_event_queue: + if module.incoming_event_queue is not None: module.incoming_event_queue.get_nowait() with contextlib.suppress(asyncio.queues.QueueEmpty): while 1: - if module.outgoing_event_queue: + if module.outgoing_event_queue is not None: module.outgoing_event_queue.get_nowait() self.debug("Finished draining queues") From 3b219e81a25b28e8980703289b1b0c66aba12d9b Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 19 Aug 2024 02:19:58 -0400 Subject: [PATCH 088/124] more debugging --- bbot/core/engine.py | 2 +- bbot/core/helpers/web/client.py | 5 +++++ bbot/core/helpers/web/engine.py | 7 +++---- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/bbot/core/engine.py b/bbot/core/engine.py index 7773f0513..aa7b93f5a 100644 --- a/bbot/core/engine.py +++ b/bbot/core/engine.py @@ -81,7 +81,7 @@ async def _infinite_retry(self, callback, *args, **kwargs): def engine_debug(self, *args, **kwargs): if self._engine_debug: - self.log.debug(*args, **kwargs) + self.log.trace(*args, **kwargs) class EngineClient(EngineBase): diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py index cd925730d..f72dcab5d 100644 --- a/bbot/core/helpers/web/client.py +++ b/bbot/core/helpers/web/client.py @@ -72,6 +72,7 @@ def __init__(self, *args, **kwargs): proxies = self._web_config.get("http_proxy", None) kwargs["proxies"] = proxies + log.debug(f"Creating httpx.AsyncClient({args}, {kwargs})") super().__init__(*args, **kwargs) if not self._persist_cookies: self._cookies = DummyCookies() @@ -91,3 +92,7 @@ def _merge_cookies(self, cookies): if self._persist_cookies: return super()._merge_cookies(cookies) return cookies + + @property + def retries(self): + return self._transport._pool._retries diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index 448d95d58..cb90b1067 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -35,20 +35,19 @@ def __init__(self, socket_path, target, config={}, debug=False): self.http_debug = self.web_config.get("debug", False) self._ssl_context_noverify = None self.web_clients = {} - self.web_clients[0] = self.AsyncClient(persist_cookies=False, retries=0) - self.web_client = self.web_clients[0] + self.web_client = self.AsyncClient(persist_cookies=False) def AsyncClient(self, *args, **kwargs): # cache by retries to prevent unwanted accumulation of clients # (they are not garbage-collected) - retries = kwargs.get("retries", 0) + retries = kwargs.get("retries", 1) try: return self.web_clients[retries] except KeyError: from .client import BBOTAsyncClient client = BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs) - self.web_clients[retries] = client + self.web_clients[client.retries] = client return client async def request(self, *args, **kwargs): From 24c2fa70cec221522e84420872135dd05b21bed2 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 20 Aug 2024 10:08:22 -0400 Subject: 
[PATCH 089/124] update unstructured --- bbot/core/helpers/web/engine.py | 3 +-- bbot/modules/unstructured.py | 2 +- bbot/scanner/preset/environ.py | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index cb90b1067..b349d11fa 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -83,8 +83,7 @@ async def request(self, *args, **kwargs): async with self._acatch(url, raise_error): if self.http_debug: - logstr = f"Web request: {str(args)}, {str(kwargs)}" - log.trace(logstr) + log.trace(f"Web request: {str(args)}, {str(kwargs)}") response = await client.request(*args, **kwargs) if self.http_debug: log.trace( diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 83b3552c9..6f5663a4d 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -67,7 +67,7 @@ class unstructured(BaseModule): } deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"] - deps_pip = ["unstructured[all-docs]>=0.5.15,<1.0"] + deps_pip = ["unstructured[all-docs]>=0.15.6,<1.0"] scope_distance_modifier = 1 diff --git a/bbot/scanner/preset/environ.py b/bbot/scanner/preset/environ.py index c4c2b8f5b..f14b0c0fb 100644 --- a/bbot/scanner/preset/environ.py +++ b/bbot/scanner/preset/environ.py @@ -42,7 +42,7 @@ def add_to_path(v, k="PATH", environ=None): if _ != v and _ not in deduped_var_list: deduped_var_list.append(_) deduped_var_list = [v] + deduped_var_list - new_var_str = ":".join(deduped_var_list) + new_var_str = ":".join(deduped_var_list).strip(":") environ[k] = new_var_str From 844b684b960a9ce1e6ddd064cd472823059f5c3c Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 20 Aug 2024 13:26:58 -0400 Subject: [PATCH 090/124] fix web proxy --- bbot/modules/deadly/ffuf.py | 4 ++++ bbot/modules/deadly/nuclei.py | 2 +- bbot/modules/gowitness.py | 2 +- bbot/modules/wpscan.py | 2 +- bbot/scanner/preset/environ.py | 2 +- bbot/scanner/preset/preset.py | 2 +- 6 files changed, 9 insertions(+), 5 deletions(-) diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index 8995dd3dc..d1ce50e38 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -37,6 +37,7 @@ async def setup(self): self.canary = "".join(random.choice(string.ascii_lowercase) for i in range(10)) wordlist_url = self.config.get("wordlist", "") + self.proxy = self.scan.web_config.get("http_proxy", "") self.debug(f"Using wordlist [{wordlist_url}]") self.wordlist = await self.helpers.wordlist(wordlist_url) self.wordlist_lines = list(self.helpers.read_file(self.wordlist)) @@ -243,6 +244,9 @@ async def execute_ffuf( self.debug("invalid mode specified, aborting") return + if self.proxy: + command += ["-x", self.proxy] + if apply_filters: if ext in filters.keys(): if filters[ext][0] == ("ABORT"): diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 45d76411c..1af628827 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -75,7 +75,7 @@ async def setup(self): self.warning(f"Failure while updating nuclei templates: {update_results.stderr}") else: self.warning("Error running nuclei template update command") - self.proxy = self.scan.config.get("http_proxy", "") + self.proxy = self.scan.web_config.get("http_proxy", "") self.mode = self.config.get("mode", "severe").lower() self.ratelimit = int(self.config.get("ratelimit", 150)) self.concurrency = int(self.config.get("concurrency", 25)) diff --git 
a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 5bfdfc42a..9d6d57483 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -56,7 +56,7 @@ async def setup(self): self.threads = self.config.get("threads", 0) if not self.threads: self.threads = default_thread_count - self.proxy = self.scan.config.get("http_proxy", "") + self.proxy = self.scan.web_config.get("http_proxy", "") self.resolution_x = self.config.get("resolution_x") self.resolution_y = self.config.get("resolution_y") self.visit_social = self.config.get("social", True) diff --git a/bbot/modules/wpscan.py b/bbot/modules/wpscan.py index 60f247af4..10dade438 100644 --- a/bbot/modules/wpscan.py +++ b/bbot/modules/wpscan.py @@ -62,7 +62,7 @@ async def setup(self): self.ignore_events = ["xmlrpc", "readme"] self.api_key = self.config.get("api_key", "") self.enumerate = self.config.get("enumerate", "vp,vt,tt,cb,dbe,u,m") - self.proxy = self.scan.config.get("http_proxy", "") + self.proxy = self.scan.web_config.get("http_proxy", "") self.threads = self.config.get("threads", 5) self.request_timeout = self.config.get("request_timeout", 60) self.connection_timeout = self.config.get("connection_timeout", 30) diff --git a/bbot/scanner/preset/environ.py b/bbot/scanner/preset/environ.py index f14b0c0fb..4b7121e3c 100644 --- a/bbot/scanner/preset/environ.py +++ b/bbot/scanner/preset/environ.py @@ -107,7 +107,7 @@ def prepare(self): environ.update(bbot_environ) # handle HTTP proxy - http_proxy = self.preset.config.get("http_proxy", "") + http_proxy = self.preset.config.get("web", {}).get("http_proxy", "") if http_proxy: environ["HTTP_PROXY"] = http_proxy environ["HTTPS_PROXY"] = http_proxy diff --git a/bbot/scanner/preset/preset.py b/bbot/scanner/preset/preset.py index 9e8242ea9..99ad86db5 100644 --- a/bbot/scanner/preset/preset.py +++ b/bbot/scanner/preset/preset.py @@ -74,7 +74,7 @@ class Preset: "1.2.3.0/24", flags=["subdomain-enum"], modules=["nuclei"], - config={"http_proxy": "http://127.0.0.1"} + config={"web": {"http_proxy": "http://127.0.0.1"}} ) >>> scan = Scanner(preset=preset) From 70a7cff83e7eed71b5f8b76d07bb5d32c066a3f4 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 20 Aug 2024 15:46:30 -0400 Subject: [PATCH 091/124] fix ffuf tests --- bbot/modules/deadly/ffuf.py | 3 +-- bbot/modules/ffuf_shortnames.py | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index d1ce50e38..a8e5ec174 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -34,10 +34,9 @@ class ffuf(BaseModule): in_scope_only = True async def setup(self): - + self.proxy = self.scan.web_config.get("http_proxy", "") self.canary = "".join(random.choice(string.ascii_lowercase) for i in range(10)) wordlist_url = self.config.get("wordlist", "") - self.proxy = self.scan.web_config.get("http_proxy", "") self.debug(f"Using wordlist [{wordlist_url}]") self.wordlist = await self.helpers.wordlist(wordlist_url) self.wordlist_lines = list(self.helpers.read_file(self.wordlist)) diff --git a/bbot/modules/ffuf_shortnames.py b/bbot/modules/ffuf_shortnames.py index 76e36de03..a1adcda61 100644 --- a/bbot/modules/ffuf_shortnames.py +++ b/bbot/modules/ffuf_shortnames.py @@ -67,6 +67,7 @@ class ffuf_shortnames(ffuf): in_scope_only = True async def setup(self): + self.proxy = self.scan.web_config.get("http_proxy", "") self.canary = "".join(random.choice(string.ascii_lowercase) for i in range(10)) wordlist = self.config.get("wordlist", "") if not wordlist: From 
ba31de595a222b478986f29795c603186ecf714f Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 20 Aug 2024 16:18:10 -0400 Subject: [PATCH 092/124] cache asn queries --- bbot/modules/report/asn.py | 69 ++++++++++++++++++++------------------ 1 file changed, 37 insertions(+), 32 deletions(-) diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index 61e51a725..9fa0897cb 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -18,6 +18,7 @@ class asn(BaseReportModule): async def setup(self): self.asn_counts = {} self.asn_cache = {} + self.ripe_cache = {} self.sources = ["bgpview", "ripe"] self.unknown_asn = { "asn": "UNKNOWN", @@ -144,38 +145,42 @@ async def get_asn_ripe(self, ip): return asns async def get_asn_metadata_ripe(self, asn_number): - metadata_keys = { - "name": ["ASName", "OrgId"], - "description": ["OrgName", "OrgTechName", "RTechName"], - "country": ["Country"], - } - url = f"https://stat.ripe.net/data/whois/data.json?resource={asn_number}" - response = await self.get_url(url, "ASN Metadata", cache=True) - if response == False: - return False - data = response.get("data", {}) - if not data: - data = {} - records = data.get("records", []) - if not records: - records = [] - emails = set() - asn = {k: "" for k in metadata_keys.keys()} - for record in records: - for item in record: - key = item.get("key", "") - value = item.get("value", "") - for email in await self.helpers.re.extract_emails(value): - emails.add(email.lower()) - if not key: - continue - if value: - for keyname, keyvals in metadata_keys.items(): - if key in keyvals and not asn.get(keyname, ""): - asn[keyname] = value - asn["emails"] = list(emails) - asn["asn"] = str(asn_number) - return asn + try: + return self.ripe_cache[asn_number] + except KeyError: + metadata_keys = { + "name": ["ASName", "OrgId"], + "description": ["OrgName", "OrgTechName", "RTechName"], + "country": ["Country"], + } + url = f"https://stat.ripe.net/data/whois/data.json?resource={asn_number}" + response = await self.get_url(url, "ASN Metadata", cache=True) + if response == False: + return False + data = response.get("data", {}) + if not data: + data = {} + records = data.get("records", []) + if not records: + records = [] + emails = set() + asn = {k: "" for k in metadata_keys.keys()} + for record in records: + for item in record: + key = item.get("key", "") + value = item.get("value", "") + for email in await self.helpers.re.extract_emails(value): + emails.add(email.lower()) + if not key: + continue + if value: + for keyname, keyvals in metadata_keys.items(): + if key in keyvals and not asn.get(keyname, ""): + asn[keyname] = value + asn["emails"] = list(emails) + asn["asn"] = str(asn_number) + self.ripe_cache[asn_number][asn_number] = asn + return asn async def get_asn_bgpview(self, ip): url = f"https://api.bgpview.io/ip/{ip}" From 0b82e2f1bc1848805c1496b7902b1ca126eb5639 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 20 Aug 2024 16:20:32 -0400 Subject: [PATCH 093/124] fix asn bug --- bbot/modules/report/asn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index 9fa0897cb..ba5e1e39a 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -179,7 +179,7 @@ async def get_asn_metadata_ripe(self, asn_number): asn[keyname] = value asn["emails"] = list(emails) asn["asn"] = str(asn_number) - self.ripe_cache[asn_number][asn_number] = asn + self.ripe_cache[asn_number] = asn return asn async def 
get_asn_bgpview(self, ip):

From 196e87fe54fc34ba8d513c2e06609fdc16ac304b Mon Sep 17 00:00:00 2001
From: github-actions
Date: Wed, 21 Aug 2024 02:29:17 -0400
Subject: [PATCH 094/124] increase interval

---
 bbot/core/engine.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bbot/core/engine.py b/bbot/core/engine.py
index aa7b93f5a..e3485df52 100644
--- a/bbot/core/engine.py
+++ b/bbot/core/engine.py
@@ -63,10 +63,10 @@ def unpickle(self, binary):
             return error_sentinel

     async def _infinite_retry(self, callback, *args, **kwargs):
-        interval = kwargs.pop("_interval", 15)
+        interval = kwargs.pop("_interval", 300)
         context = kwargs.pop("_context", "")
         # default overall timeout of 5 minutes (15 second interval * 20 iterations)
-        max_retries = kwargs.pop("_max_retries", 4 * 5)
+        max_retries = kwargs.pop("_max_retries", 1)

From ee21c854c563570818a813bf82cb9d9be5d46e1e Mon Sep 17 00:00:00 2001
From: github-actions
Date: Wed, 21 Aug 2024 16:01:10 -0400
Subject: [PATCH 095/124] debug logging

---
 bbot/core/engine.py             | 4 ++--
 bbot/core/helpers/web/client.py | 2 +-
 bbot/core/helpers/web/engine.py | 1 +
 3 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/bbot/core/engine.py b/bbot/core/engine.py
index e3485df52..01b805c1f 100644
--- a/bbot/core/engine.py
+++ b/bbot/core/engine.py
@@ -65,7 +65,7 @@ def unpickle(self, binary):
     async def _infinite_retry(self, callback, *args, **kwargs):
         interval = kwargs.pop("_interval", 300)
         context = kwargs.pop("_context", "")
-        # default overall timeout of 5 minutes (15 second interval * 20 iterations)
+        # default overall timeout of 10 minutes (300 second interval * 2 iterations)
         max_retries = kwargs.pop("_max_retries", 1)
@@ -77,7 +77,7 @@ async def _infinite_retry(self, callback, *args, **kwargs):
             self.log.debug(f"{self.name}: Timeout after {interval:,} seconds {context}, retrying...")
             retries += 1
             if max_retries is not None and retries > max_retries:
-                raise TimeoutError(f"Timed out after {max_retries*interval:,} seconds {context}")
+                raise TimeoutError(f"Timed out after {(max_retries+1)*interval:,} seconds {context}")

     def engine_debug(self, *args, **kwargs):
         if self._engine_debug:
diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py
index f72dcab5d..9cbc418a6 100644
--- a/bbot/core/helpers/web/client.py
+++ b/bbot/core/helpers/web/client.py
@@ -72,7 +72,7 @@ def __init__(self, *args, **kwargs):
             proxies = self._web_config.get("http_proxy", None)
             kwargs["proxies"] = proxies
-        log.debug(f"Creating httpx.AsyncClient({args}, {kwargs})")
+        log.critical(f"Creating httpx.AsyncClient({args}, {kwargs})")
         super().__init__(*args, **kwargs)
         if not self._persist_cookies:
             self._cookies = DummyCookies()
diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py
index b349d11fa..70fc20931 100644
--- a/bbot/core/helpers/web/engine.py
+++ b/bbot/core/helpers/web/engine.py
@@ -44,6 +44,7 @@ def AsyncClient(self, *args, **kwargs):
         try:
             return self.web_clients[retries]
         except KeyError:
+            log.critical('CREATING CLIENT')
            from .client import BBOTAsyncClient

             client = BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs)

From a9aa2ba45981dbc67de12695266ae87256b0c193 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Thu, 22 Aug 2024 16:23:21 -0400
Subject: [PATCH 096/124] fix excavate bug

---
 bbot/core/helpers/web/engine.py | 2 +-
 bbot/core/helpers/web/web.py | 
16 +++++++++++++--- .../module_tests/test_module_excavate.py | 12 ++++++------ .../module_tests/test_module_unstructured.py | 2 +- 4 files changed, 21 insertions(+), 11 deletions(-) diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index 70fc20931..231137899 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -44,7 +44,7 @@ def AsyncClient(self, *args, **kwargs): try: return self.web_clients[retries] except KeyError: - log.critical('CREATING CLIENT') + log.critical("CREATING CLIENT") from .client import BBOTAsyncClient client = BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs) diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py index c061a3d62..15afb93d1 100644 --- a/bbot/core/helpers/web/web.py +++ b/bbot/core/helpers/web/web.py @@ -55,6 +55,7 @@ def __init__(self, parent_helper): self.web_config = self.config.get("web", {}) self.web_spider_depth = self.web_config.get("spider_depth", 1) self.web_spider_distance = self.web_config.get("spider_distance", 0) + self.web_clients = {} self.target = self.preset.target self.ssl_verify = self.config.get("ssl_verify", False) engine_debug = self.config.get("engine", {}).get("debug", False) @@ -64,9 +65,18 @@ def __init__(self, parent_helper): ) def AsyncClient(self, *args, **kwargs): - from .client import BBOTAsyncClient - - return BBOTAsyncClient.from_config(self.config, self.target, *args, persist_cookies=False, **kwargs) + # cache by retries to prevent unwanted accumulation of clients + # (they are not garbage-collected) + retries = kwargs.get("retries", 1) + try: + return self.web_clients[retries] + except KeyError: + log.critical("CREATING CLIENT") + from .client import BBOTAsyncClient + + client = BBOTAsyncClient.from_config(self.config, self.target, *args, persist_cookies=False, **kwargs) + self.web_clients[client.retries] = client + return client async def request(self, *args, **kwargs): """ diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 3279d3c5d..123d42e31 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -19,7 +19,7 @@ async def setup_before_prep(self, module_test): \\nhttps://www1.test.notreal \\x3dhttps://www2.test.notreal %0ahttps://www3.test.notreal - \\u000ahttps://www4.test.notreal + \\u000ahttps://www4.test.notreal: \nwww5.test.notreal \\x3dwww6.test.notreal %0awww7.test.notreal @@ -989,17 +989,17 @@ def check(self, module_test, events): assert file.is_file(), "Destination file doesn't exist" assert open(file).read() == self.pdf_data, f"File at {file} does not contain the correct content" raw_text_events = [e for e in events if e.type == "RAW_TEXT"] - assert 1 == len(raw_text_events), "Failed to emmit RAW_TEXT event" + assert 1 == len(raw_text_events), "Failed to emit RAW_TEXT event" assert ( raw_text_events[0].data == self.unstructured_response ), f"Text extracted from PDF is incorrect, got {raw_text_events[0].data}" email_events = [e for e in events if e.type == "EMAIL_ADDRESS"] - assert 1 == len(email_events), "Failed to emmit EMAIL_ADDRESS event" + assert 1 == len(email_events), "Failed to emit EMAIL_ADDRESS event" assert ( email_events[0].data == "example@blacklanternsecurity.notreal" ), f"Email extracted from unstructured text is incorrect, got {email_events[0].data}" finding_events = [e for e in events if e.type == "FINDING"] - assert 2 == 
len(finding_events), "Failed to emit FINDING events"
         assert any(
             e.type == "FINDING"
             and "JWT" in e.data["description"]
@@ -1008,7 +1008,7 @@
             and e.data["path"].endswith("http-127-0-0-1-8888-test-pdf.pdf")
             and str(e.host) == "127.0.0.1"
             for e in finding_events
-        ), f"Failed to emmit JWT event got {finding_events}"
+        ), f"Failed to emit JWT event got {finding_events}"
         assert any(
             e.type == "FINDING"
             and "DOTNET" in e.data["description"]
@@ -1017,7 +1017,7 @@
             and e.data["path"].endswith("http-127-0-0-1-8888-test-pdf.pdf")
             and str(e.host) == "127.0.0.1"
             for e in finding_events
-        ), f"Failed to emmit serialized event got {finding_events}"
+        ), f"Failed to emit serialized event got {finding_events}"
         assert finding_events[0].data["path"] == str(file), "File path not included in finding event"
         url_events = [e.data for e in events if e.type == "URL_UNVERIFIED"]
         assert (
diff --git a/bbot/test/test_step_2/module_tests/test_module_unstructured.py b/bbot/test/test_step_2/module_tests/test_module_unstructured.py
index 7acb24ad4..9d289377e 100644
--- a/bbot/test/test_step_2/module_tests/test_module_unstructured.py
+++ b/bbot/test/test_step_2/module_tests/test_module_unstructured.py
@@ -96,7 +96,7 @@ def check(self, module_test, events):
         assert file.is_file(), "Destination file doesn't exist"
         assert open(file).read() == self.pdf_data, f"File at {file} does not contain the correct content"
         raw_text_events = [e for e in events if e.type == "RAW_TEXT"]
-        assert 1 == len(raw_text_events), "Failed to emmit RAW_TEXT event"
+        assert 1 == len(raw_text_events), "Failed to emit RAW_TEXT event"
         assert (
             raw_text_events[0].data == self.unstructured_response
         ), f"Text extracted from PDF is incorrect, got {raw_text_events[0].data}"

From e4ca7155b92db59b51d44dcc805dcd1118bcf85f Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 23 Aug 2024 01:01:09 -0400
Subject: [PATCH 097/124] don't keep client tasks

---
 bbot/core/engine.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/bbot/core/engine.py b/bbot/core/engine.py
index 01b805c1f..c75cea536 100644
--- a/bbot/core/engine.py
+++ b/bbot/core/engine.py
@@ -563,6 +563,9 @@ async def _shutdown(self):

     def new_child_task(self, client_id, coro):
         task = asyncio.create_task(coro)
+        def remove_task():
+            self.child_tasks.get(client_id, set()).discard(task)
+        task.add_done_callback(remove_task)
         try:
             self.child_tasks[client_id].add(task)
         except KeyError:

From f7a748e31827ead4134f45f8d630efe15941799c Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 23 Aug 2024 01:01:58 -0400
Subject: [PATCH 098/124] fix bug

---
 bbot/core/engine.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bbot/core/engine.py b/bbot/core/engine.py
index c75cea536..8b4709847 100644
--- a/bbot/core/engine.py
+++ b/bbot/core/engine.py
@@ -563,8 +563,8 @@ async def _shutdown(self):

     def new_child_task(self, client_id, coro):
         task = asyncio.create_task(coro)
-        def remove_task():
-            self.child_tasks.get(client_id, set()).discard(task)
+        def remove_task(t):
+            self.child_tasks.get(client_id, set()).discard(t)
         task.add_done_callback(remove_task)
         try:
             self.child_tasks[client_id].add(task)
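PATCH 097 and PATCH 098 above hinge on a detail of the asyncio API: a done callback is invoked with the finished task as its single positional argument, so the zero-argument remove_task() added in PATCH 097 would raise a TypeError inside the event loop, which PATCH 098 corrects. A minimal, self-contained illustration of the callback contract (names here are illustrative only):

    import asyncio


    async def work():
        return 42


    async def main():
        task = asyncio.create_task(work())

        def remove_task(t):  # asyncio always passes the finished task here
            print("task finished with result:", t.result())

        task.add_done_callback(remove_task)
        await task


    asyncio.run(main())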
From 5e981ff93cd77a0c4874bd51b726e67ac58e3d08 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 23 Aug 2024 01:09:46 -0400
Subject: [PATCH 099/124] blacked

---
 bbot/core/engine.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/bbot/core/engine.py b/bbot/core/engine.py
index 8b4709847..fdb4f06e1 100644
--- a/bbot/core/engine.py
+++ b/bbot/core/engine.py
@@ -563,8 +563,13 @@ async def _shutdown(self):

     def new_child_task(self, client_id, coro):
         task = asyncio.create_task(coro)
+
        def remove_task(t):
-            self.child_tasks.get(client_id, set()).discard(t)
+            tasks = self.child_tasks.get(client_id, set())
+            tasks.discard(t)
+            if not tasks:
+                self.child_tasks.pop(client_id, None)
+
        task.add_done_callback(remove_task)
        try:
            self.child_tasks[client_id].add(task)

From 429b234bbaddb6238201971feebb90546f768f78 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 23 Aug 2024 17:06:40 -0400
Subject: [PATCH 100/124] delete some code

---
 bbot/core/engine.py             | 129 +++++++++++++++++++++++---------
 bbot/core/helpers/dns/engine.py |  61 +++------------
 bbot/core/helpers/web/engine.py |  59 +++------------
 3 files changed, 116 insertions(+), 133 deletions(-)

diff --git a/bbot/core/engine.py b/bbot/core/engine.py
index fdb4f06e1..ccd3493c7 100644
--- a/bbot/core/engine.py
+++ b/bbot/core/engine.py
@@ -551,49 +551,106 @@ async def _shutdown(self):
         self.log.verbose(f"{self.name}: shutting down...")
         self._shutdown_status = True
         await self.cancel_all_tasks()
-        try:
-            self.context.destroy(linger=0)
-        except Exception:
-            self.log.trace(traceback.format_exc())
-        try:
-            self.context.term()
-        except Exception:
-            self.log.trace(traceback.format_exc())
+        context = getattr(self, "context", None)
+        if context is not None:
+            try:
+                context.destroy(linger=0)
+            except Exception:
+                self.log.trace(traceback.format_exc())
+            try:
+                context.term()
+            except Exception:
+                self.log.trace(traceback.format_exc())
         self.log.verbose(f"{self.name}: finished shutting down")

-    def new_child_task(self, client_id, coro):
+    async def task_pool(self, fn, args_kwargs, threads=10, timeout=300, global_kwargs=None):
+        if global_kwargs is None:
+            global_kwargs = {}
+
+        tasks = {}
+        args_kwargs = list(args_kwargs)
+
+        def new_task():
+            if args_kwargs:
+                kwargs = {}
+                tracker = None
+                args = args_kwargs.pop(0)
+                if isinstance(args, (list, tuple)):
+                    # you can specify a custom tracker value if you want
+                    # this helps with correlating results
+                    with suppress(ValueError):
+                        args, kwargs, tracker = args
+                    # or you can just specify args/kwargs
+                    with suppress(ValueError):
+                        args, kwargs = args
+
+                if not isinstance(kwargs, dict):
+                    raise ValueError(f"kwargs must be dict (got: {kwargs})")
+                if not isinstance(args, (list, tuple)):
+                    args = [args]
+
+                task = self.new_child_task(fn(*args, **kwargs, **global_kwargs))
+                tasks[task] = (args, kwargs, tracker)
+
+        for _ in range(threads):  # Start initial batch of tasks
+            new_task()
+
+        while tasks:  # While there are tasks pending
+            # Wait for the first task to complete
+            finished = await self.finished_tasks(tasks, timeout=timeout)
+            for task in finished:
+                result = task.result()
+                (args, kwargs, tracker) = tasks.pop(task)
+                yield (args, kwargs, tracker), result
+                new_task()
+
+    def new_child_task(self, coro):
+        """
+        Create a new asyncio task, making sure to track it based on the client id.
+
+        This allows the task to be automatically cancelled if its parent is cancelled.
+ """ + client_id = self.client_id_var.get() task = asyncio.create_task(coro) - def remove_task(t): - tasks = self.child_tasks.get(client_id, set()) - tasks.discard(t) - if not tasks: - self.child_tasks.pop(client_id, None) + if client_id: + + def remove_task(t): + tasks = self.child_tasks.get(client_id, set()) + tasks.discard(t) + if not tasks: + self.child_tasks.pop(client_id, None) + + task.add_done_callback(remove_task) + + try: + self.child_tasks[client_id].add(task) + except KeyError: + self.child_tasks[client_id] = {task} - task.add_done_callback(remove_task) - try: - self.child_tasks[client_id].add(task) - except KeyError: - self.child_tasks[client_id] = {task} return task - async def finished_tasks(self, client_id, timeout=None): - child_tasks = self.child_tasks.get(client_id, set()) - try: - done, pending = await asyncio.wait(child_tasks, return_when=asyncio.FIRST_COMPLETED, timeout=timeout) - except BaseException as e: - if isinstance(e, (TimeoutError, asyncio.exceptions.TimeoutError)): - done = set() - self.log.warning(f"{self.name}: Timeout after {timeout:,} seconds in finished_tasks({child_tasks})") - for task in child_tasks: - task.cancel() - else: - if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): - self.log.error(f"{self.name}: Unhandled exception in finished_tasks({child_tasks}): {e}") - self.log.trace(traceback.format_exc()) - raise - self.child_tasks[client_id] = pending - return done + async def finished_tasks(self, tasks, timeout=None): + """ + Given a list of asyncio tasks, return the ones that are finished with an optional timeout + """ + if tasks: + try: + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED, timeout=timeout) + return done + except BaseException as e: + if isinstance(e, (TimeoutError, asyncio.exceptions.TimeoutError)): + self.log.warning( + f"{self.name}: Timeout after {timeout:,} seconds in finished_tasks({tasks})" + ) + for task in tasks: + task.cancel() + else: + if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): + self.log.error(f"{self.name}: Unhandled exception in finished_tasks({tasks}): {e}") + self.log.trace(traceback.format_exc()) + raise + return set() async def cancel_task(self, client_id): parent_task = self.tasks.pop(client_id, None) diff --git a/bbot/core/helpers/dns/engine.py b/bbot/core/helpers/dns/engine.py index d24c1f766..8a41c7c8e 100644 --- a/bbot/core/helpers/dns/engine.py +++ b/bbot/core/helpers/dns/engine.py @@ -349,57 +349,20 @@ async def resolve_batch(self, queries, threads=10, **kwargs): ('www.evilcorp.com', {'1.1.1.1'}) ('evilcorp.com', {'2.2.2.2'}) """ - tasks = {} - client_id = self.client_id_var.get() - - def new_task(query): - task = self.new_child_task(client_id, self.resolve(query, **kwargs)) - tasks[task] = query - - queries = list(queries) - for _ in range(threads): # Start initial batch of tasks - if queries: # Ensure there are args to process - new_task(queries.pop(0)) - - while tasks: # While there are tasks pending - # Wait for the first task to complete - finished = await self.finished_tasks(client_id, timeout=120) - - for task in finished: - results = task.result() - query = tasks.pop(task) - - if results: - yield (query, results) - - if queries: # Start a new task for each one completed, if URLs remain - new_task(queries.pop(0)) + async for (args, _, _), responses in self.task_pool( + self.resolve, args_kwargs=queries, threads=threads, global_kwargs=kwargs + ): + yield args[0], responses async def resolve_raw_batch(self, queries, 
threads=10, **kwargs):
-        tasks = {}
-        client_id = self.client_id_var.get()
-
-        def new_task(query, rdtype):
-            task = self.new_child_task(client_id, self.resolve_raw(query, type=rdtype, **kwargs))
-            tasks[task] = (query, rdtype)
-
-        queries = list(queries)
-        for _ in range(threads):  # Start initial batch of tasks
-            if queries:  # Ensure there are args to process
-                new_task(*queries.pop(0))
-
-        while tasks:  # While there are tasks pending
-            # Wait for the first task to complete
-            finished = await self.finished_tasks(client_id, timeout=120)
-
-            for task in finished:
-                answers, errors = task.result()
-                query, rdtype = tasks.pop(task)
-                for answer in answers:
-                    yield ((query, rdtype), (answer, errors))
-
-            if queries:  # Start a new task for each one completed, if URLs remain
-                new_task(*queries.pop(0))
+        queries_kwargs = [[q[0], {"type": q[1]}] for q in queries]
+        async for (args, kwargs, _), (answers, errors) in self.task_pool(
+            self.resolve_raw, args_kwargs=queries_kwargs, threads=threads, global_kwargs=kwargs
+        ):
+            query = args[0]
+            rdtype = kwargs["type"]
+            for answer in answers:
+                yield ((query, rdtype), (answer, errors))

     async def _catch(self, callback, *args, **kwargs):
         """
diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py
index 231137899..7ec79e925 100644
--- a/bbot/core/helpers/web/engine.py
+++ b/bbot/core/helpers/web/engine.py
@@ -92,54 +92,17 @@ async def request(self, *args, **kwargs):
             )
         return response

-    async def request_batch(self, urls, *args, threads=10, **kwargs):
-        tasks = {}
-        client_id = self.client_id_var.get()
-
-        urls = list(urls)
-
-        def new_task():
-            if urls:
-                url = urls.pop(0)
-                task = self.new_child_task(client_id, self.request(url, *args, **kwargs))
-                tasks[task] = url
-
-        for _ in range(threads):  # Start initial batch of tasks
-            new_task()
-
-        while tasks:  # While there are tasks pending
-            # Wait for the first task to complete
-            finished = await self.finished_tasks(client_id, timeout=120)
-
-            for task in finished:
-                response = task.result()
-                url = tasks.pop(task)
-                yield (url, response)
-                new_task()
-
-    async def request_custom_batch(self, urls_and_kwargs, threads=10):
-        tasks = {}
-        client_id = self.client_id_var.get()
-        urls_and_kwargs = list(urls_and_kwargs)
-
-        def new_task():
-            if urls_and_kwargs:  # Ensure there are args to process
-                url, kwargs, custom_tracker = urls_and_kwargs.pop(0)
-                task = self.new_child_task(client_id, self.request(url, **kwargs))
-                tasks[task] = (url, kwargs, custom_tracker)
-
-        for _ in range(threads):  # Start initial batch of tasks
-            new_task()
-
-        while tasks:  # While there are tasks pending
-            # Wait for the first task to complete
-            done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
-
-            for task in done:
-                response = task.result()
-                url, kwargs, custom_tracker = tasks.pop(task)
-                yield (url, kwargs, custom_tracker, response)
-                new_task()
+    async def request_batch(self, urls, threads=10, **kwargs):
+        async for (args, _, _), response in self.task_pool(
+            self.request, args_kwargs=urls, threads=threads, global_kwargs=kwargs
+        ):
+            yield args[0], response
+
+    async def request_custom_batch(self, urls_and_kwargs, threads=10, **kwargs):
+        async for (args, kwargs, tracker), response in self.task_pool(
+            self.request, args_kwargs=urls_and_kwargs, threads=threads, global_kwargs=kwargs
+        ):
+            yield args[0], kwargs, tracker, response

     async def download(self, url, **kwargs):
         warn = kwargs.pop("warn", True)
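PATCH 100 above funnels resolve_batch, resolve_raw_batch, request_batch, and request_custom_batch through the single task_pool async generator. A self-contained sketch of the same bounded-pool pattern, using illustrative names rather than BBOT's exact API:

    import asyncio


    async def task_pool(fn, all_args, threads=10):
        # Keep at most `threads` tasks in flight; yield each (args, result)
        # pair as soon as its task completes, then backfill the pool.
        pending_args = list(all_args)
        tasks = {}

        def new_task():
            if pending_args:
                args = pending_args.pop(0)
                tasks[asyncio.create_task(fn(args))] = args

        for _ in range(threads):  # prime the pool
            new_task()
        while tasks:
            done, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
            for task in done:
                yield tasks.pop(task), task.result()
                new_task()


    async def main():
        async def square(n):
            await asyncio.sleep(0.01 * n)
            return n * n

        async for n, result in task_pool(square, range(5), threads=2):
            print(n, result)


    asyncio.run(main())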
From 798a1fa35f21539cee355d2c231ec8291e8b7fbb Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 23 Aug 2024 17:53:50 -0400
Subject: [PATCH 101/124] blacked

---
 bbot/core/engine.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/bbot/core/engine.py b/bbot/core/engine.py
index ccd3493c7..9d42c9719 100644
--- a/bbot/core/engine.py
+++ b/bbot/core/engine.py
@@ -640,9 +640,7 @@ async def finished_tasks(self, tasks, timeout=None):
                 return done
             except BaseException as e:
                 if isinstance(e, (TimeoutError, asyncio.exceptions.TimeoutError)):
-                    self.log.warning(
-                        f"{self.name}: Timeout after {timeout:,} seconds in finished_tasks({tasks})"
-                    )
+                    self.log.warning(f"{self.name}: Timeout after {timeout:,} seconds in finished_tasks({tasks})")
                     for task in tasks:
                         task.cancel()
                 else:

From 09ec45d2e194c3bc98d92af871064a5df7dd27f0 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 23 Aug 2024 17:57:14 -0400
Subject: [PATCH 102/124] remove debug messages

---
 bbot/core/helpers/web/client.py | 2 +-
 bbot/core/helpers/web/engine.py | 1 -
 bbot/core/helpers/web/web.py    | 1 -
 3 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py
index 9cbc418a6..c09a0e485 100644
--- a/bbot/core/helpers/web/client.py
+++ b/bbot/core/helpers/web/client.py
@@ -72,7 +72,7 @@ def __init__(self, *args, **kwargs):
             proxies = self._web_config.get("http_proxy", None)
             kwargs["proxies"] = proxies

-        log.critical(f"Creating httpx.AsyncClient({args}, {kwargs})")
+        log.verbose(f"Creating httpx.AsyncClient({args}, {kwargs})")
         super().__init__(*args, **kwargs)
         if not self._persist_cookies:
             self._cookies = DummyCookies()
diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py
index 7ec79e925..60e7038aa 100644
--- a/bbot/core/helpers/web/engine.py
+++ b/bbot/core/helpers/web/engine.py
@@ -44,7 +44,6 @@ def AsyncClient(self, *args, **kwargs):
         try:
             return self.web_clients[retries]
         except KeyError:
-            log.critical("CREATING CLIENT")
             from .client import BBOTAsyncClient

             client = BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs)
diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py
index 15afb93d1..6f7f1b578 100644
--- a/bbot/core/helpers/web/web.py
+++ b/bbot/core/helpers/web/web.py
@@ -71,7 +71,6 @@ def AsyncClient(self, *args, **kwargs):
         try:
             return self.web_clients[retries]
         except KeyError:
-            log.critical("CREATING CLIENT")
             from .client import BBOTAsyncClient

             client = BBOTAsyncClient.from_config(self.config, self.target, *args, persist_cookies=False, **kwargs)

From b8451321102a24dc02b58af8cfd2b158ba086e4d Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 23 Aug 2024 18:03:12 -0400
Subject: [PATCH 103/124] fix bug with queue draining

---
 bbot/scanner/scanner.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py
index 1ec21d5c3..d5a564f02 100644
--- a/bbot/scanner/scanner.py
+++ b/bbot/scanner/scanner.py
@@ -740,11 +740,11 @@ def _drain_queues(self):
         for module in self.modules.values():
             with contextlib.suppress(asyncio.queues.QueueEmpty):
                 while 1:
-                    if module.incoming_event_queue is not None:
+                    if module.incoming_event_queue not in (None, False):
                         module.incoming_event_queue.get_nowait()
             with contextlib.suppress(asyncio.queues.QueueEmpty):
                 while 1:
-                    if module.outgoing_event_queue is not None:
+                    if module.outgoing_event_queue not in (None, False):
                         module.outgoing_event_queue.get_nowait()
         self.debug("Finished draining queues")
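Context for the queue-draining fix in PATCH 103: a module's event queue can be False (queueing disabled) as well as None (not yet created). Because False is not None evaluates to True, the old guard would call get_nowait() on False and raise AttributeError, which suppress(QueueEmpty) does not catch. A two-line demonstration:

    queue = False  # a disabled queue, as opposed to a merely unset one

    print(queue is not None)           # True  -> old guard falls through and crashes
    print(queue not in (None, False))  # False -> new guard skips disabled queues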
From b55b79d8c804ea6640314985e853468d0d6910c8 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 23 Aug 2024 21:35:14 -0400
Subject: [PATCH 104/124] remove unneeded debug statements

---
 bbot/scanner/scanner.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py
index d5a564f02..ba550f217 100644
--- a/bbot/scanner/scanner.py
+++ b/bbot/scanner/scanner.py
@@ -656,14 +656,14 @@ def modules_status(self, _log=False):
             scan_active_status.append(f"scan.modules_finished: {self.modules_finished}")
             for m in sorted_modules:
                 running = m.running
-                scan_active_status.append(f"    {m}.finished: {m.finished}")
-                scan_active_status.append(f"      running: {running}")
+                scan_active_status.append(f"    {m}:")
+                # scan_active_status.append(f"      running: {running}")
                 if running:
-                    scan_active_status.append(f"      tasks:")
+                    # scan_active_status.append(f"      tasks:")
                     for task in list(m._task_counter.tasks.values()):
-                        scan_active_status.append(f"        - {task}:")
-                scan_active_status.append(f"      incoming_queue_size: {m.num_incoming_events}")
-                scan_active_status.append(f"      outgoing_queue_size: {m.outgoing_event_queue.qsize()}")
+                        scan_active_status.append(f"        - {task}:")
+                # scan_active_status.append(f"      incoming_queue_size: {m.num_incoming_events}")
+                # scan_active_status.append(f"      outgoing_queue_size: {m.outgoing_event_queue.qsize()}")

             for line in scan_active_status:
                 self.debug(line)

From 3de844b66a1d72ebc55f9e521a8166e6580dc746 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Mon, 26 Aug 2024 13:00:16 -0400
Subject: [PATCH 105/124] fix unstructured bug

---
 bbot/modules/internal/excavate.py | 5 +++--
 bbot/modules/unstructured.py      | 2 +-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py
index e542aa20a..0bbdd97fc 100644
--- a/bbot/modules/internal/excavate.py
+++ b/bbot/modules/internal/excavate.py
@@ -684,12 +684,13 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
                 final_url = ""
                 for url_str in results:
                     if identifier == "url_full":
-                        if not await self.helpers.re.search(self.full_url_regex, url_str):
+                        match = await self.helpers.re.search(self.full_url_regex, url_str)
+                        if not match:
                             self.excavate.debug(
                                 f"Rejecting potential full URL [{url_str}] as did not match full_url_regex"
                             )
                             continue
-                        final_url = url_str
+                        final_url = match.group()
                         self.excavate.debug(f"Discovered Full URL [{final_url}]")
                     elif identifier == "url_attr" and hasattr(event, "parsed_url"):
diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py
index 6f5663a4d..6e19f4cf1 100644
--- a/bbot/modules/unstructured.py
+++ b/bbot/modules/unstructured.py
@@ -67,7 +67,7 @@ class unstructured(BaseModule):
     }

     deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"]
-    deps_pip = ["unstructured[all-docs]>=0.15.6,<1.0"]
+    deps_pip = ["unstructured[all-docs]==0.15.5"]

     scope_distance_modifier = 1

From 0a407903c077dcc88ce98dccbe698f1803685d34 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Mon, 26 Aug 2024 15:40:24 -0400
Subject: [PATCH 106/124] fix path bug

---
 bbot/core/helpers/command.py | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py
index 7283291fc..a13f8b29d 100644
--- a/bbot/core/helpers/command.py
+++ b/bbot/core/helpers/command.py
@@ -3,9 +3,9 @@
 import logging
 import traceback
 from signal import SIGINT
-from subprocess import CompletedProcess, CalledProcessError
+from subprocess import CompletedProcess, CalledProcessError, SubprocessError

-from .misc import smart_decode, smart_encode
+from .misc import smart_decode, smart_encode, which

 log = logging.getLogger("bbot.core.helpers.command")

@@ -276,6 +276,17 @@ def _prepare_command_kwargs(self, command, kwargs):
             command = command[0]
         command = [str(s) for s in command]

+        if not command:
+            raise SubprocessError("Must specify a command")
+
+        # use full path of binary, if not already specified
+        binary = command[0]
+        if not "/" in binary:
+            binary_full_path = which(command[0])
+            if binary_full_path is None:
+                raise SubprocessError(f'Command "{binary}" was not found')
+            command[0] = binary_full_path
+
         env = kwargs.get("env", os.environ)
         if sudo and os.geteuid() != 0:
             self.depsinstaller.ensure_root()

From 01b41368cad85449980667a583eb949cfa8e5215 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Mon, 26 Aug 2024 16:02:16 -0400
Subject: [PATCH 107/124] fix unstructured???

---
 bbot/modules/unstructured.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py
index 6e19f4cf1..9c5e58996 100644
--- a/bbot/modules/unstructured.py
+++ b/bbot/modules/unstructured.py
@@ -67,7 +67,7 @@ class unstructured(BaseModule):
     }

     deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"]
-    deps_pip = ["unstructured[all-docs]==0.15.5"]
+    deps_pip = ["unstructured[all-docs]>=0.15.7,<1.0", "nltk>=3.9.0,<4.0"]

     scope_distance_modifier = 1

From d3922c2c75e379cd7542cce6febed0cb40c3801a Mon Sep 17 00:00:00 2001
From: github-actions
Date: Mon, 26 Aug 2024 16:13:50 -0400
Subject: [PATCH 108/124] fix tests

---
 bbot/core/helpers/command.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py
index a13f8b29d..82e0aedfc 100644
--- a/bbot/core/helpers/command.py
+++ b/bbot/core/helpers/command.py
@@ -182,7 +182,11 @@ async def _spawn_proc(self, *command, **kwargs):
         >>> _spawn_proc("ls", "-l", input="data")
         (, "data", ["ls", "-l"])
         """
-        command, kwargs = self._prepare_command_kwargs(command, kwargs)
+        try:
+            command, kwargs = self._prepare_command_kwargs(command, kwargs)
+        except SubprocessError as e:
+            log.warning(e)
+            return None, None, None
         _input = kwargs.pop("input", None)
         if _input is not None:
             if kwargs.get("stdin") is not None:

From f652f73171a8d98d9cbbd5d2804b58783e50eb8c Mon Sep 17 00:00:00 2001
From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com>
Date: Mon, 26 Aug 2024 23:01:53 -0400
Subject: [PATCH 109/124] Update command.py

---
 bbot/core/helpers/command.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py
index 82e0aedfc..6f43a401d 100644
--- a/bbot/core/helpers/command.py
+++ b/bbot/core/helpers/command.py
@@ -286,7 +286,7 @@ def _prepare_command_kwargs(self, command, kwargs):
         # use full path of binary, if not already specified
         binary = command[0]
         if not "/" in binary:
-            binary_full_path = which(command[0])
+            binary_full_path = which(binary)
             if binary_full_path is None:
                 raise SubprocessError(f'Command "{binary}" was not found')
             command[0] = binary_full_path
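The _prepare_command_kwargs change in PATCH 106 (refined in PATCH 109) resolves bare program names to absolute paths up front, so a missing binary surfaces as one clean SubprocessError instead of a later failure at spawn time. A rough standard-library equivalent of the same check, with shutil.which standing in for BBOT's which helper and an illustrative command:

    import shutil
    from subprocess import SubprocessError

    command = ["ls", "-l"]  # illustrative command only

    binary = command[0]
    if "/" not in binary:
        binary_full_path = shutil.which(binary)
        if binary_full_path is None:
            raise SubprocessError(f'Command "{binary}" was not found')
        command[0] = binary_full_path
    print(command)  # e.g. ['/bin/ls', '-l']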
From 18d365f791c1494fedfc883926f1335b01a2bced Mon Sep 17 00:00:00 2001
From: github-actions
Date: Tue, 27 Aug 2024 10:26:01 -0400
Subject: [PATCH 110/124] removed fix for
 https://github.com/blacklanternsecurity/bbot/issues/1691 per @liquidsec

---
 bbot/modules/internal/excavate.py                          | 5 ++---
 bbot/test/test_step_2/module_tests/test_module_excavate.py | 2 +-
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py
index 0bbdd97fc..e542aa20a 100644
--- a/bbot/modules/internal/excavate.py
+++ b/bbot/modules/internal/excavate.py
@@ -684,13 +684,12 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
                 final_url = ""
                 for url_str in results:
                     if identifier == "url_full":
-                        match = await self.helpers.re.search(self.full_url_regex, url_str)
-                        if not match:
+                        if not await self.helpers.re.search(self.full_url_regex, url_str):
                             self.excavate.debug(
                                 f"Rejecting potential full URL [{url_str}] as did not match full_url_regex"
                             )
                             continue
-                        final_url = match.group()
+                        final_url = url_str
                         self.excavate.debug(f"Discovered Full URL [{final_url}]")
                     elif identifier == "url_attr" and hasattr(event, "parsed_url"):
diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py
index 123d42e31..690deb3f5 100644
--- a/bbot/test/test_step_2/module_tests/test_module_excavate.py
+++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py
@@ -19,7 +19,7 @@ async def setup_before_prep(self, module_test):
         \\nhttps://www1.test.notreal
         \\x3dhttps://www2.test.notreal
         %0ahttps://www3.test.notreal
-        \\u000ahttps://www4.test.notreal:
+        \\u000ahttps://www4.test.notreal
         \nwww5.test.notreal
         \\x3dwww6.test.notreal
         %0awww7.test.notreal

From 42669c832b4c741c270580a3ca708300ed1f4dd0 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Tue, 27 Aug 2024 12:02:23 -0400
Subject: [PATCH 111/124] bump version

---
 pyproject.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 18fae5509..1bcfe4540 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "bbot"
-version = "2.0.0"
+version = "2.0.1"
 description = "OSINT automation for hackers."
authors = [ "TheTechromancer", @@ -98,7 +98,7 @@ extend-exclude = "(test_step_1/test_manager_*)" [tool.poetry-dynamic-versioning] enable = true metadata = false -format-jinja = 'v2.0.0{% if branch == "dev" %}.{{ distance }}rc{% endif %}' +format-jinja = 'v2.0.1{% if branch == "dev" %}.{{ distance }}rc{% endif %}' [tool.poetry-dynamic-versioning.substitution] files = ["*/__init__.py"] From f2c540048c2b4fee6ad375adeeefdd3732e6576d Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 27 Aug 2024 13:44:33 -0400 Subject: [PATCH 112/124] more optimization --- bbot/modules/deadly/ffuf.py | 39 ++++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index a8e5ec174..d68866bbb 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -39,7 +39,7 @@ async def setup(self): wordlist_url = self.config.get("wordlist", "") self.debug(f"Using wordlist [{wordlist_url}]") self.wordlist = await self.helpers.wordlist(wordlist_url) - self.wordlist_lines = list(self.helpers.read_file(self.wordlist)) + self.wordlist_lines = self.gen_wordlist(self.wordlist) self.tempfile, tempfile_len = self.generate_templist() self.verbose(f"Generated dynamic wordlist with length [{str(tempfile_len)}]") try: @@ -314,7 +314,6 @@ async def execute_ffuf( self.debug("Received invalid JSON from FFUF") def generate_templist(self, prefix=None): - line_count = 0 virtual_file = [] if prefix: @@ -322,21 +321,25 @@ def generate_templist(self, prefix=None): max_lines = self.config.get("lines") - for idx, val in enumerate(self.wordlist_lines): - if idx > max_lines: - break - stripped_val = val.strip().lower() - if stripped_val: - # Check if the word is in the blacklist - if stripped_val in self.blacklist: - self.debug(f"Skipping adding [{stripped_val}] to wordlist because it was in the blacklist") - else: - # Check if it starts with the given prefix (if any) - if not prefix or stripped_val.startswith(prefix): - # Check if it contains any banned characters - if not any(char in self.banned_characters for char in stripped_val): - line_count += 1 - virtual_file.append(stripped_val) + for line in self.wordlist_lines[:max_lines]: + # Check if it starts with the given prefix (if any) + if not prefix or line.startswith(prefix): + virtual_file.append(line) virtual_file.append(self.canary) - return self.helpers.tempfile(virtual_file, pipe=False), line_count + return self.helpers.tempfile(virtual_file, pipe=False), len(virtual_file) + + def generate_wordlist(self, wordlist_file): + wordlist = [] + for line in self.helpers.read_file(wordlist_file): + line = line.strip() + if not line: + continue + if line in self.blacklist: + self.debug(f"Skipping adding [{line}] to wordlist because it was in the blacklist") + continue + if any(x in line for x in self.banned_characters): + self.debug(f"Skipping adding [{line}] to wordlist because has a banned character") + continue + wordlist.append(line) + return wordlist From d367d34aab6024f0c8021296f1b51e4c39919984 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 27 Aug 2024 13:55:19 -0400 Subject: [PATCH 113/124] fix typo --- bbot/modules/deadly/ffuf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index d68866bbb..c1960ac83 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -339,7 +339,7 @@ def generate_wordlist(self, wordlist_file): self.debug(f"Skipping adding [{line}] to 
wordlist because it was in the blacklist")
                 continue
             if any(x in line for x in self.banned_characters):
-                self.debug(f"Skipping adding [{line}] to wordlist because has a banned character")
+                self.debug(f"Skipping adding [{line}] to wordlist because it has a banned character")
                 continue
             wordlist.append(line)
         return wordlist

From cd132c128da236db97345b018cd96078d8586c73 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Tue, 27 Aug 2024 15:25:08 -0400
Subject: [PATCH 114/124] fix tests

---
 bbot/modules/deadly/ffuf.py     | 6 ++----
 bbot/modules/ffuf_shortnames.py | 2 +-
 2 files changed, 3 insertions(+), 5 deletions(-)

diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py
index c1960ac83..4643c9826 100644
--- a/bbot/modules/deadly/ffuf.py
+++ b/bbot/modules/deadly/ffuf.py
@@ -39,7 +39,7 @@ async def setup(self):
         wordlist_url = self.config.get("wordlist", "")
         self.debug(f"Using wordlist [{wordlist_url}]")
         self.wordlist = await self.helpers.wordlist(wordlist_url)
-        self.wordlist_lines = self.gen_wordlist(self.wordlist)
+        self.wordlist_lines = self.generate_wordlist(self.wordlist)
         self.tempfile, tempfile_len = self.generate_templist()
         self.verbose(f"Generated dynamic wordlist with length [{str(tempfile_len)}]")
         try:
@@ -315,15 +315,13 @@ async def execute_ffuf(

     def generate_templist(self, prefix=None):
         virtual_file = []
-
         if prefix:
             prefix = prefix.strip().lower()
-
         max_lines = self.config.get("lines")

         for line in self.wordlist_lines[:max_lines]:
             # Check if it starts with the given prefix (if any)
-            if not prefix or line.startswith(prefix):
+            if (not prefix) or line.lower().startswith(prefix):
                 virtual_file.append(line)

         virtual_file.append(self.canary)
diff --git a/bbot/modules/ffuf_shortnames.py b/bbot/modules/ffuf_shortnames.py
index a1adcda61..fa6f03ada 100644
--- a/bbot/modules/ffuf_shortnames.py
+++ b/bbot/modules/ffuf_shortnames.py
@@ -74,7 +74,7 @@ async def setup(self):
         wordlist = f"{self.helpers.wordlist_dir}/ffuf_shortname_candidates.txt"
         self.debug(f"Using [{wordlist}] for shortname candidate list")
         self.wordlist = await self.helpers.wordlist(wordlist)
-        self.wordlist_lines = list(self.helpers.read_file(self.wordlist))
+        self.wordlist_lines = self.generate_wordlist(self.wordlist)

         wordlist_extensions = self.config.get("wordlist_extensions", "")
         if not wordlist_extensions:

From fb14be572e141976ddedbd72e07704b07b2faec6 Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Wed, 28 Aug 2024 10:54:32 +0100
Subject: [PATCH 115/124] Release notes contained new lines so was not setting
 the GITHUB variable correctly

---
 .github/workflows/version_updater.yaml | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/version_updater.yaml b/.github/workflows/version_updater.yaml
index 3a67e0c3f..a2ce0bd33 100644
--- a/.github/workflows/version_updater.yaml
+++ b/.github/workflows/version_updater.yaml
@@ -28,8 +28,9 @@ jobs:
           response = requests.get('https://api.github.com/repos/projectdiscovery/nuclei/releases/latest')
           version = response.json()['tag_name'].lstrip('v')
           release_notes = response.json()['body']
-          os.system(f"echo 'latest_version={version}' >> $GITHUB_ENV")
-          os.system(f"echo 'release_notes={release_notes}' >> $GITHUB_ENV")
+          with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
+              env_file.write(f"latest_version={version}\n")
+              env_file.write(f"release_notes<<EOF\n{release_notes}\nEOF\n")
-          os.system(f"echo 'latest_version={version}' >> $GITHUB_ENV")
-          os.system(f"echo 'release_notes={release_notes}' >> $GITHUB_ENV")
+          with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
+              env_file.write(f"latest_version={version}\n")
+              env_file.write(f"release_notes<<EOF\n{release_notes}\nEOF\n")
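For reference, PATCH 115 relies on GitHub Actions' heredoc syntax for writing multi-line values to the GITHUB_ENV file (name<<DELIMITER ... DELIMITER); a plain name=value line silently loses everything after the first newline, which is the bug being fixed. A small sketch, assuming the conventional EOF delimiter, with an illustrative version string and a fallback filename so it also runs outside of Actions:

    import os

    release_notes = "first line\nsecond line"
    with open(os.getenv("GITHUB_ENV", "github.env"), "a") as env_file:
        env_file.write("latest_version=1.2.3\n")  # single-line values are simple
        env_file.write(f"release_notes<<EOF\n{release_notes}\nEOF\n")  # multi-line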
From: Dom Whewell
Date: Wed, 28 Aug 2024 10:56:12 +0100
Subject: [PATCH 116/124] Set release notes as heading

---
 .github/workflows/version_updater.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/version_updater.yaml b/.github/workflows/version_updater.yaml
index a2ce0bd33..d2911bcba 100644
--- a/.github/workflows/version_updater.yaml
+++ b/.github/workflows/version_updater.yaml
@@ -51,7 +51,7 @@ jobs:
           body: |
             This PR uses https://api.github.com/repos/projectdiscovery/nuclei/releases/latest to obtain the latest version of nuclei and update the version in bbot/modules/deadly/nuclei.py."

-            Release notes:
+            # Release notes:
             ${{ env.release_notes }}
           branch: "update-nuclei"
           committer: GitHub
@@ -102,7 +102,7 @@ jobs:
           body: |
             This PR uses https://api.github.com/repos/trufflesecurity/trufflehog/releases/latest to obtain the latest version of trufflehog and update the version in bbot/modules/trufflehog.py.

-            Release notes:
+            # Release notes:
             ${{ env.release_notes }}
           branch: "update-trufflehog"
           committer: GitHub

From 15b4b36aa7bb571a87f189bdf07ba7f91e6d1d2d Mon Sep 17 00:00:00 2001
From: github-actions
Date: Wed, 28 Aug 2024 09:52:57 -0400
Subject: [PATCH 117/124] fix tablesort

---
 docs/javascripts/tablesort.min.js | 6 ++++++
 mkdocs.yml                        | 3 ---
 2 files changed, 6 insertions(+), 3 deletions(-)
 create mode 100644 docs/javascripts/tablesort.min.js

diff --git a/docs/javascripts/tablesort.min.js b/docs/javascripts/tablesort.min.js
new file mode 100644
index 000000000..65a83b113
--- /dev/null
+++ b/docs/javascripts/tablesort.min.js
@@ -0,0 +1,6 @@
+/*!
+ * tablesort v5.2.1 (2021-10-30)
+ * http://tristen.ca/tablesort/demo/
+ * Copyright (c) 2021 ; Licensed MIT
+*/
+!function(){function a(b,c){if(!(this instanceof a))return new a(b,c);if(!b||"TABLE"!==b.tagName)throw new Error("Element must be a table");this.init(b,c||{})}var b=[],c=function(a){var b;return window.CustomEvent&&"function"==typeof window.CustomEvent?b=new CustomEvent(a):(b=document.createEvent("CustomEvent"),b.initCustomEvent(a,!1,!1,void 0)),b},d=function(a,b){return a.getAttribute(b.sortAttribute||"data-sort")||a.textContent||a.innerText||""},e=function(a,b){return a=a.trim().toLowerCase(),b=b.trim().toLowerCase(),a===b?0:a0)if(a.tHead&&a.tHead.rows.length>0){for(e=0;e0&&n.push(m),o++;if(!n)return}for(o=0;o

From: github-actions
Date: Wed, 28 Aug 2024 10:06:32 -0400
Subject: [PATCH 118/124] bump baddns, lower dnswalk timeout settings

---
 bbot/modules/baddns.py      | 23 +++++++++++++--------
 bbot/modules/baddns_zone.py |  2 +-
 2 files changed, 16 insertions(+), 9 deletions(-)

diff --git a/bbot/modules/baddns.py b/bbot/modules/baddns.py
index 6dbc7c141..92b12a371 100644
--- a/bbot/modules/baddns.py
+++ b/bbot/modules/baddns.py
@@ -22,7 +22,7 @@ class baddns(BaseModule):
         "enable_references": "Enable the references module (off by default)",
     }
     module_threads = 8
-    deps_pip = ["baddns~=1.1.798"]
+    deps_pip = ["baddns~=1.1.815"]

     def select_modules(self):

@@ -49,13 +49,20 @@ async def handle_event(self, event):
         tasks = []
         for ModuleClass in self.select_modules():
-            module_instance = ModuleClass(
-                event.data,
-                http_client_class=self.scan.helpers.web.AsyncClient,
-                dns_client=self.scan.helpers.dns.resolver,
-                custom_nameservers=self.custom_nameservers,
-                signatures=self.signatures,
-            )
+            kwargs = {
+                "http_client_class": self.scan.helpers.web.AsyncClient,
+                "dns_client": self.scan.helpers.dns.resolver,
+                "custom_nameservers": self.custom_nameservers,
+                "signatures": self.signatures,
+            }
+
+            if ModuleClass.name == 
"NS": + kwargs["raw_query_max_retries"] = 1 + kwargs["raw_query_timeout"] = 4.0 + kwargs["raw_query_retry_wait"] = 0 + + module_instance = ModuleClass(event.data, **kwargs) + tasks.append((module_instance, asyncio.create_task(module_instance.dispatch()))) for module_instance, task in tasks: diff --git a/bbot/modules/baddns_zone.py b/bbot/modules/baddns_zone.py index 5cb916ad8..a356f61b3 100644 --- a/bbot/modules/baddns_zone.py +++ b/bbot/modules/baddns_zone.py @@ -17,7 +17,7 @@ class baddns_zone(baddns_module): "only_high_confidence": "Do not emit low-confidence or generic detections", } module_threads = 8 - deps_pip = ["baddns~=1.1.798"] + deps_pip = ["baddns~=1.1.815"] def select_modules(self): selected_modules = [] From c9c7ab7f21f21d743e0e60de9dfe0c7a3a7401a1 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 28 Aug 2024 10:08:40 -0400 Subject: [PATCH 119/124] fix tag error --- bbot/core/event/base.py | 12 ++++++++---- bbot/core/helpers/web/web.py | 2 ++ bbot/modules/internal/dnsresolve.py | 2 ++ bbot/modules/internal/excavate.py | 4 ++-- .../test_step_2/module_tests/test_module_excavate.py | 4 ++++ .../module_tests/test_module_github_codesearch.py | 3 ++- 6 files changed, 20 insertions(+), 7 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 4b891856c..3eb10625f 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -408,6 +408,10 @@ def tags(self, tags): def add_tag(self, tag): self._tags.add(tagify(tag)) + def add_tags(self, tags): + for tag in set(tags): + self.add_tag(tag) + def remove_tag(self, tag): with suppress(KeyError): self._tags.remove(tagify(tag)) @@ -482,10 +486,10 @@ def scope_distance(self, scope_distance): self.remove_tag("in-scope") self.add_tag(f"distance-{new_scope_distance}") self._scope_distance = new_scope_distance - # apply recursively to parent events - parent_scope_distance = getattr(self.parent, "scope_distance", None) - if parent_scope_distance is not None and self.parent is not self: - self.parent.scope_distance = scope_distance + 1 + # apply recursively to parent events + parent_scope_distance = getattr(self.parent, "scope_distance", None) + if parent_scope_distance is not None and self.parent is not self: + self.parent.scope_distance = new_scope_distance + 1 @property def scope_description(self): diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py index 6f7f1b578..66be930c1 100644 --- a/bbot/core/helpers/web/web.py +++ b/bbot/core/helpers/web/web.py @@ -511,6 +511,8 @@ def beautifulsoup( def is_login_page(self, html): """ + TODO: convert this into an excavate YARA rule + Determines if the provided HTML content contains a login page. 
This function parses the HTML to search for forms with input fields typically used for diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index 0877c3aa7..42ec8cf94 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -109,6 +109,8 @@ async def handle_event(self, event, **kwargs): main_host_event.scope_distance = 0 await self.handle_wildcard_event(main_host_event) + in_dns_scope = -1 < main_host_event.scope_distance < self._dns_search_distance + if event != main_host_event: await self.emit_event(main_host_event) for raw_record_event in raw_record_events: diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index e542aa20a..794adae25 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -211,7 +211,7 @@ async def report_prep(self, event_data, event_type, event, tags): event_draft = self.excavate.make_event(event_data, event_type, parent=event) if not event_draft: return None - event_draft.tags = tags + event_draft.add_tags(tags) return event_draft async def report( @@ -734,7 +734,7 @@ async def report_prep(self, event_data, event_type, event, tags, **kwargs): exceeds_max_links = urls_found > self.excavate.scan.web_spider_links_per_page and url_in_scope if exceeds_max_links: tags.append("spider-max") - event_draft.tags = tags + event_draft.add_tags(tags) return event_draft class HostnameExtractor(ExcavateRule): diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 690deb3f5..e5655f156 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -86,6 +86,10 @@ def check(self, module_test, events): e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/relative.html" and "spider-max" not in e.tags + and "endpoint" in e.tags + and "extension-html" in e.tags + and "in-scope" in e.tags + and e.scope_distance == 0 for e in events ) diff --git a/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py b/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py index ad3c5eae7..03c519a8c 100644 --- a/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py +++ b/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py @@ -13,10 +13,11 @@ class TestGithub_Codesearch(ModuleTestBase): "/projectdiscovery/nuclei/06f242e5fce3439b7418877676810cbf57934875/v2/cmd/cve-annotate/main.go" ) github_file_url = f"http://127.0.0.1:8888{github_file_endpoint}" + github_file_content = "-----BEGIN PGP PRIVATE KEY BLOCK-----" async def setup_before_prep(self, module_test): expect_args = {"method": "GET", "uri": self.github_file_endpoint} - respond_args = {"response_data": "-----BEGIN PGP PRIVATE KEY BLOCK-----"} + respond_args = {"response_data": self.github_file_content} module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) module_test.httpx_mock.add_response(url="https://api.github.com/zen") From 26a60decbaf726021a545626bb00e4fd62d2dc1f Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 28 Aug 2024 10:30:06 -0400 Subject: [PATCH 120/124] matching value with bbot default --- bbot/modules/baddns.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/baddns.py b/bbot/modules/baddns.py index 92b12a371..7425a985e 100644 --- a/bbot/modules/baddns.py +++ b/bbot/modules/baddns.py @@ -58,7 +58,7 @@ async def 
handle_event(self, event): if ModuleClass.name == "NS": kwargs["raw_query_max_retries"] = 1 - kwargs["raw_query_timeout"] = 4.0 + kwargs["raw_query_timeout"] = 5.0 kwargs["raw_query_retry_wait"] = 0 module_instance = ModuleClass(event.data, **kwargs) From 34663e3adb522474cc7f0309645f5621ebf6e7b6 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Thu, 29 Aug 2024 15:41:56 +0100 Subject: [PATCH 121/124] Filter out already processed events from trufflehog --- bbot/modules/trufflehog.py | 8 ++++++++ .../test_step_2/module_tests/test_module_trufflehog.py | 3 +++ 2 files changed, 11 insertions(+) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index 002cdac9a..ff70e114e 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -1,3 +1,4 @@ +import os import json from bbot.modules.base import BaseModule @@ -59,6 +60,7 @@ async def setup(self): if not self.github_token: self.deleted_forks = False return None, "A github api_key must be provided to the github modules for deleted forks to be scanned" + self.processed = set() return True async def filter_event(self, event): @@ -70,6 +72,11 @@ async def filter_event(self, event): return False, "Module only accepts github CODE_REPOSITORY events" else: return False, "Deleted forks is not enabled" + else: + path = event.data["path"] + for processed_path in self.processed: + if os.path.commonpath([path, processed_path]) == processed_path: + return False, "Parent folder has already been processed" return True async def handle_event(self, event): @@ -80,6 +87,7 @@ async def handle_event(self, event): module = "github-experimental" else: path = event.data["path"] + self.processed.add(path) if "git" in event.tags: module = "git" elif "docker" in event.tags: diff --git a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py index 7dde0d38a..68285a001 100644 --- a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py +++ b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py @@ -13,6 +13,7 @@ class TestTrufflehog(ModuleTestBase): "github_org", "speculate", "git_clone", + "unstructured", "github_workflows", "dockerhub", "docker_pull", @@ -854,6 +855,7 @@ def check(self, module_test, events): and "Raw result: [https://admin:admin@the-internet.herokuapp.com]" in e.data["description"] and "RawV2 result: [https://admin:admin@the-internet.herokuapp.com/basic_auth]" in e.data["description"] ] + # Trufflehog should find 3 verifiable secrets, 1 from the github, 1 from the workflow log and 1 from the docker image. Unstructured will extract the text file but trufflehog should reject it as its already scanned the containing folder assert 3 == len(vuln_events), "Failed to find secret in events" github_repo_event = [e for e in vuln_events if "test_keys" in e.data["description"]][0].parent folder = Path(github_repo_event.data["path"]) @@ -901,6 +903,7 @@ def check(self, module_test, events): and "Potential Secret Found." in e.data["description"] and "Raw result: [https://admin:admin@internal.host.com]" in e.data["description"] ] + # Trufflehog should find 3 unverifiable secrets, 1 from the github, 1 from the workflow log and 1 from the docker image. 
Unstructured will extract the text file but trufflehog should reject it as its already scanned the containing folder
         assert 3 == len(finding_events), "Failed to find secret in events"
         github_repo_event = [e for e in finding_events if "test_keys" in e.data["description"]][0].parent
         folder = Path(github_repo_event.data["path"])

From adc3140613814dad1e7fd2835766c675265f76e0 Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Thu, 29 Aug 2024 18:08:10 +0100
Subject: [PATCH 122/124] Use Path instead of os

---
 bbot/modules/trufflehog.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py
index ff70e114e..2f482d598 100644
--- a/bbot/modules/trufflehog.py
+++ b/bbot/modules/trufflehog.py
@@ -1,5 +1,5 @@
-import os
 import json
+from pathlib import Path
 from bbot.modules.base import BaseModule

@@ -74,8 +74,10 @@ async def filter_event(self, event):
                 return False, "Deleted forks is not enabled"
         else:
             path = event.data["path"]
-            for processed_path in self.processed:
-                if os.path.commonpath([path, processed_path]) == processed_path:
+            for processed in self.processed:
+                processed_path = Path(processed)
+                new_path = Path(path)
+                if new_path.is_relative_to(processed_path):
                     return False, "Parent folder has already been processed"
         return True
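PATCH 122 swaps the os.path.commonpath comparison for pathlib's more direct Path.is_relative_to, available since Python 3.9. A tiny sketch with illustrative paths:

    from pathlib import Path

    processed = {"/scans/acme-repo"}  # folders trufflehog has already scanned
    candidate = Path("/scans/acme-repo/src/creds.txt")

    # True -> the event is filtered out, since its parent folder was processed
    print(any(candidate.is_relative_to(Path(p)) for p in processed))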
From c09bc0eeec621662ba3ceeaf6e6f1e5a3d9f6cbb Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Thu, 29 Aug 2024 18:42:17 +0100
Subject: [PATCH 123/124] Add an option to trufflehog to allow users to
 specify their own custom config file

---
 bbot/modules/trufflehog.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py
index 002cdac9a..8f016a9f6 100644
--- a/bbot/modules/trufflehog.py
+++ b/bbot/modules/trufflehog.py
@@ -14,12 +14,14 @@ class trufflehog(BaseModule):
     options = {
         "version": "3.81.9",
+        "config": "",
         "only_verified": True,
         "concurrency": 8,
         "deleted_forks": False,
     }
     options_desc = {
         "version": "trufflehog version",
+        "config": "File path to YAML trufflehog config",
         "only_verified": "Only report credentials that have been verified",
         "concurrency": "Number of concurrent workers",
         "deleted_forks": "Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours.",
     }
@@ -40,6 +42,9 @@ class trufflehog(BaseModule):
     async def setup(self):
         self.verified = self.config.get("only_verified", True)
+        self.config_file = self.config.get("config", "")
+        if self.config_file:
+            self.config_file = await self.helpers.wordlist(self.config_file)
         self.concurrency = int(self.config.get("concurrency", 8))
         self.deleted_forks = self.config.get("deleted_forks", False)
@@ -140,6 +145,8 @@ async def execute_trufflehog(self, module, path):
         ]
         if self.verified:
             command.append("--only-verified")
+        if self.config_file:
+            command.append("--config=" + str(self.config_file))
         command.append("--concurrency=" + str(self.concurrency))
         if module == "git":
             command.append("git")

From cf263e11defd7cb14b50b3b7e98490dad3340741 Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Thu, 29 Aug 2024 18:48:38 +0100
Subject: [PATCH 124/124] Changed option description

---
 bbot/modules/trufflehog.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py
index 8f016a9f6..10a3f2ee6 100644
--- a/bbot/modules/trufflehog.py
+++ b/bbot/modules/trufflehog.py
@@ -21,7 +21,7 @@ class trufflehog(BaseModule):
     options_desc = {
         "version": "trufflehog version",
-        "config": "File path to YAML trufflehog config",
+        "config": "File path or URL to YAML trufflehog config",
         "only_verified": "Only report credentials that have been verified",
         "concurrency": "Number of concurrent workers",
         "deleted_forks": "Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours.",
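Taken together, PATCH 123 and PATCH 124 let users point the trufflehog module at their own config, by local path or, per the updated description, by URL (the helpers.wordlist() call appears to handle fetching remote files). A rough sketch of how the option reaches the command line, with illustrative values:

    # Assumes a module config shaped like the options dict above; the
    # file name "trufflehog-config.yaml" is purely hypothetical.
    config = {"config": "trufflehog-config.yaml", "only_verified": True, "concurrency": 8}

    command = ["trufflehog"]
    if config.get("only_verified", True):
        command.append("--only-verified")
    if config.get("config", ""):
        command.append("--config=" + str(config["config"]))
    command.append("--concurrency=" + str(config.get("concurrency", 8)))
    print(command)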