From e89ed26f0e2493e69a65c94fcaa042b7693ea0c9 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 27 Nov 2024 18:27:41 +0000 Subject: [PATCH 01/85] Initial proof of concept of orm.yaml --- .gitignore | 2 +- poetry.lock | 2150 ++++++++++++++++------------- pyproject.toml | 11 +- sqlsynthgen/main.py | 44 +- sqlsynthgen/make.py | 57 +- sqlsynthgen/remove.py | 16 +- sqlsynthgen/serialize_metadata.py | 189 +++ 7 files changed, 1450 insertions(+), 1019 deletions(-) create mode 100644 sqlsynthgen/serialize_metadata.py diff --git a/.gitignore b/.gitignore index e0df4ec1..71ef09b1 100644 --- a/.gitignore +++ b/.gitignore @@ -144,5 +144,5 @@ docs/temp/* *.swp ssg.py -orm.py +orm.yaml src-stats.yaml diff --git a/poetry.lock b/poetry.lock index 4fac904c..1da95e9f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "alabaster" -version = "0.7.13" -description = "A configurable sidebar-enabled Sphinx theme" +version = "0.7.16" +description = "A light, configurable Sphinx theme" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" files = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] [[package]] @@ -23,95 +23,123 @@ files = [ [[package]] name = "astroid" -version = "3.0.1" +version = "3.3.5" description = "An abstract syntax tree for Python with inference support." 
optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "astroid-3.0.1-py3-none-any.whl", hash = "sha256:7d5895c9825e18079c5aeac0572bc2e4c83205c95d416e0b4fee8bc361d2d9ca"}, - {file = "astroid-3.0.1.tar.gz", hash = "sha256:86b0bb7d7da0be1a7c4aedb7974e391b32d4ed89e33de6ed6902b4b15c97577e"}, + {file = "astroid-3.3.5-py3-none-any.whl", hash = "sha256:a9d1c946ada25098d790e079ba2a1b112157278f3fb7e718ae6a9252f5835dc8"}, + {file = "astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d"}, ] [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +[[package]] +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + [[package]] name = "asyncpg" -version = "0.27.0" +version = "0.30.0" description = "An asyncio PostgreSQL driver" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "asyncpg-0.27.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fca608d199ffed4903dce1bcd97ad0fe8260f405c1c225bdf0002709132171c2"}, - {file = "asyncpg-0.27.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20b596d8d074f6f695c13ffb8646d0b6bb1ab570ba7b0cfd349b921ff03cfc1e"}, - {file = "asyncpg-0.27.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a6206210c869ebd3f4eb9e89bea132aefb56ff3d1b7dd7e26b102b17e27bbb1"}, - {file = "asyncpg-0.27.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7a94c03386bb95456b12c66026b3a87d1b965f0f1e5733c36e7229f8f137747"}, - {file = "asyncpg-0.27.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bfc3980b4ba6f97138b04f0d32e8af21d6c9fa1f8e6e140c07d15690a0a99279"}, - {file = "asyncpg-0.27.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9654085f2b22f66952124de13a8071b54453ff972c25c59b5ce1173a4283ffd9"}, - {file = "asyncpg-0.27.0-cp310-cp310-win32.whl", hash = "sha256:879c29a75969eb2722f94443752f4720d560d1e748474de54ae8dd230bc4956b"}, - {file = "asyncpg-0.27.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab0f21c4818d46a60ca789ebc92327d6d874d3b7ccff3963f7af0a21dc6cff52"}, - {file = "asyncpg-0.27.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:18f77e8e71e826ba2d0c3ba6764930776719ae2b225ca07e014590545928b576"}, - {file = "asyncpg-0.27.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2232d4625c558f2aa001942cac1d7952aa9f0dbfc212f63bc754277769e1ef2"}, - {file = "asyncpg-0.27.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a3a4ff43702d39e3c97a8786314123d314e0f0e4dabc8367db5b665c93914de"}, - {file = "asyncpg-0.27.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccddb9419ab4e1c48742457d0c0362dbdaeb9b28e6875115abfe319b29ee225d"}, - {file = "asyncpg-0.27.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:768e0e7c2898d40b16d4ef7a0b44e8150db3dd8995b4652aa1fe2902e92c7df8"}, - {file = "asyncpg-0.27.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609054a1f47292a905582a1cfcca51a6f3f30ab9d822448693e66fdddde27920"}, - {file = 
"asyncpg-0.27.0-cp311-cp311-win32.whl", hash = "sha256:8113e17cfe236dc2277ec844ba9b3d5312f61bd2fdae6d3ed1c1cdd75f6cf2d8"}, - {file = "asyncpg-0.27.0-cp311-cp311-win_amd64.whl", hash = "sha256:bb71211414dd1eeb8d31ec529fe77cff04bf53efc783a5f6f0a32d84923f45cf"}, - {file = "asyncpg-0.27.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4750f5cf49ed48a6e49c6e5aed390eee367694636c2dcfaf4a273ca832c5c43c"}, - {file = "asyncpg-0.27.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:eca01eb112a39d31cc4abb93a5aef2a81514c23f70956729f42fb83b11b3483f"}, - {file = "asyncpg-0.27.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5710cb0937f696ce303f5eed6d272e3f057339bb4139378ccecafa9ee923a71c"}, - {file = "asyncpg-0.27.0-cp37-cp37m-win_amd64.whl", hash = "sha256:71cca80a056ebe19ec74b7117b09e650990c3ca535ac1c35234a96f65604192f"}, - {file = "asyncpg-0.27.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4bb366ae34af5b5cabc3ac6a5347dfb6013af38c68af8452f27968d49085ecc0"}, - {file = "asyncpg-0.27.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16ba8ec2e85d586b4a12bcd03e8d29e3d99e832764d6a1d0b8c27dbbe4a2569d"}, - {file = "asyncpg-0.27.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d20dea7b83651d93b1eb2f353511fe7fd554752844523f17ad30115d8b9c8cd6"}, - {file = "asyncpg-0.27.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e56ac8a8237ad4adec97c0cd4728596885f908053ab725e22900b5902e7f8e69"}, - {file = "asyncpg-0.27.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf21ebf023ec67335258e0f3d3ad7b91bb9507985ba2b2206346de488267cad0"}, - {file = "asyncpg-0.27.0-cp38-cp38-win32.whl", hash = "sha256:69aa1b443a182b13a17ff926ed6627af2d98f62f2fe5890583270cc4073f63bf"}, - {file = "asyncpg-0.27.0-cp38-cp38-win_amd64.whl", hash = "sha256:62932f29cf2433988fcd799770ec64b374a3691e7902ecf85da14d5e0854d1ea"}, - {file = "asyncpg-0.27.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fddcacf695581a8d856654bc4c8cfb73d5c9df26d5f55201722d3e6a699e9629"}, - {file = "asyncpg-0.27.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7d8585707ecc6661d07367d444bbaa846b4e095d84451340da8df55a3757e152"}, - {file = "asyncpg-0.27.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:975a320baf7020339a67315284a4d3bf7460e664e484672bd3e71dbd881bc692"}, - {file = "asyncpg-0.27.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2232ebae9796d4600a7819fc383da78ab51b32a092795f4555575fc934c1c89d"}, - {file = "asyncpg-0.27.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:88b62164738239f62f4af92567b846a8ef7cf8abf53eddd83650603de4d52163"}, - {file = "asyncpg-0.27.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eb4b2fdf88af4fb1cc569781a8f933d2a73ee82cd720e0cb4edabbaecf2a905b"}, - {file = "asyncpg-0.27.0-cp39-cp39-win32.whl", hash = "sha256:8934577e1ed13f7d2d9cea3cc016cc6f95c19faedea2c2b56a6f94f257cea672"}, - {file = "asyncpg-0.27.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b6499de06fe035cf2fa932ec5617ed3f37d4ebbf663b655922e105a484a6af9"}, - {file = "asyncpg-0.27.0.tar.gz", hash = "sha256:720986d9a4705dd8a40fdf172036f5ae787225036a7eb46e704c45aa8f62c054"}, + {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, + {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"}, + {file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"}, + {file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"}, + {file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"}, + {file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"}, + {file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"}, + {file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"}, + {file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"}, + {file = 
"asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"}, + {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"}, + {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"}, + {file = "asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"}, + {file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"}, + {file = "asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"}, + {file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"}, + {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"}, + {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"}, + {file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"}, + {file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"}, + {file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"}, ] 
+[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.0\""} + [package.extras] -dev = ["Cython (>=0.29.24,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "flake8 (>=5.0.4,<5.1.0)", "pytest (>=6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "uvloop (>=0.15.3)"] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["flake8 (>=5.0.4,<5.1.0)", "uvloop (>=0.15.3)"] +docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] +gssauth = ["gssapi", "sspilib"] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"] [[package]] name = "attrs" -version = "23.1.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" -version = "2.13.0" +version = "2.16.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Babel-2.13.0-py3-none-any.whl", hash = "sha256:fbfcae1575ff78e26c7449136f1abbefc3c13ce542eeb13d43d50d8b047216ec"}, - {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] @@ -119,29 +147,33 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "black" -version = "23.10.0" +version = "23.12.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.10.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:f8dc7d50d94063cdfd13c82368afd8588bac4ce360e4224ac399e769d6704e98"}, - {file = "black-23.10.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:f20ff03f3fdd2fd4460b4f631663813e57dc277e37fb216463f3b907aa5a9bdd"}, - {file = "black-23.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3d9129ce05b0829730323bdcb00f928a448a124af5acf90aa94d9aba6969604"}, - {file = "black-23.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:960c21555be135c4b37b7018d63d6248bdae8514e5c55b71e994ad37407f45b8"}, - {file = "black-23.10.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:30b78ac9b54cf87bcb9910ee3d499d2bc893afd52495066c49d9ee6b21eee06e"}, - {file = "black-23.10.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:0e232f24a337fed7a82c1185ae46c56c4a6167fb0fe37411b43e876892c76699"}, - {file = "black-23.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31946ec6f9c54ed7ba431c38bc81d758970dd734b96b8e8c2b17a367d7908171"}, - {file = "black-23.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:c870bee76ad5f7a5ea7bd01dc646028d05568d33b0b09b7ecfc8ec0da3f3f39c"}, - {file = "black-23.10.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:6901631b937acbee93c75537e74f69463adaf34379a04eef32425b88aca88a23"}, - {file = "black-23.10.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:481167c60cd3e6b1cb8ef2aac0f76165843a374346aeeaa9d86765fe0dd0318b"}, - {file = "black-23.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74892b4b836e5162aa0452393112a574dac85e13902c57dfbaaf388e4eda37c"}, - {file = "black-23.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:47c4510f70ec2e8f9135ba490811c071419c115e46f143e4dce2ac45afdcf4c9"}, - {file = "black-23.10.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:76baba9281e5e5b230c9b7f83a96daf67a95e919c2dfc240d9e6295eab7b9204"}, - {file = "black-23.10.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:a3c2ddb35f71976a4cfeca558848c2f2f89abc86b06e8dd89b5a65c1e6c0f22a"}, - {file = "black-23.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db451a3363b1e765c172c3fd86213a4ce63fb8524c938ebd82919bf2a6e28c6a"}, - {file = "black-23.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:7fb5fc36bb65160df21498d5a3dd330af8b6401be3f25af60c6ebfe23753f747"}, - {file = "black-23.10.0-py3-none-any.whl", hash = "sha256:e223b731a0e025f8ef427dd79d8cd69c167da807f5710add30cdf131f13dd62e"}, - {file = "black-23.10.0.tar.gz", hash = "sha256:31b9f87b277a68d0e99d2905edae08807c007973eaa609da5f0c62def6b7c0bd"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + 
{file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -155,19 +187,19 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2023.7.22" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -183,101 +215,116 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.0" +version = "3.4.0" description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = 
"charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = 
"charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = 
"charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -322,27 +369,28 @@ typing-inspect = ">=0.4.0,<1" [[package]] name = "dill" -version = "0.3.7" +version = "0.3.9" description = "serialize all of Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "distlib" -version = "0.3.7" +version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] @@ -358,19 +406,19 @@ files = [ [[package]] name = "filelock" -version = "3.12.4" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, - {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] -typing = ["typing-extensions (>=4.7.1)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "graphviz" @@ -390,75 +438,88 @@ test = ["mock (>=3)", "pytest (>=5.2)", "pytest-cov", "pytest-mock (>=2)"] [[package]] name = "greenlet" -version = "2.0.2" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ - {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = 
"greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = 
"greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, 
+python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = 
"greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] -docs = ["Sphinx", "docutils (<0.18)"] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] [[package]] @@ -473,13 +534,13 @@ files = [ [[package]] name = "identify" -version = "2.5.30" +version = "2.6.2" description = "File identification library for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "identify-2.5.30-py2.py3-none-any.whl", hash = "sha256:afe67f26ae29bab007ec21b03d4114f41316ab9dd15aa8736a167481e108da54"}, - {file = "identify-2.5.30.tar.gz", hash = "sha256:f302a4256a15c849b91cfcdcec052a8ce914634b2f77ae87dad29cd749f2d88d"}, + {file = "identify-2.6.2-py2.py3-none-any.whl", hash = "sha256:c097384259f49e372f4ea00a19719d95ae27dd5ff0fd77ad630aa891306b82f3"}, + {file = "identify-2.6.2.tar.gz", hash = "sha256:fab5c716c24d7a789775228823797296a2994b075fb6080ac83a102772a98cbd"}, ] [package.extras] @@ -487,15 +548,18 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.4" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "imagesize" version = "1.4.1" @@ -509,68 +573,73 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = 
">=0.5" +zipp = ">=3.20" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "inflect" -version = "7.0.0" -description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" +version = "7.4.0" +description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles" optional = false python-versions = ">=3.8" files = [ - {file = "inflect-7.0.0-py3-none-any.whl", hash = "sha256:9544afed6182176e43955c44b1acdaed30f9b2b56c16d1fc5b222d98218b546e"}, - {file = "inflect-7.0.0.tar.gz", hash = "sha256:63da9325ad29da81ec23e055b41225795ab793b4ecb483be5dc1fa363fd4717e"}, + {file = "inflect-7.4.0-py3-none-any.whl", hash = "sha256:85af0997ee2bda942b1c1eed8c8a827abda91aa3e22d1efaa0eea817f9350ce7"}, + {file = "inflect-7.4.0.tar.gz", hash = "sha256:904baa17cc2cb74827a6c27b95692e95670dadc72b208b3e8c1c05aeed47026b"}, ] [package.dependencies] -pydantic = ">=1.9.1" -typing-extensions = "*" +more-itertools = ">=8.5.0" +typeguard = ">=4.0.1" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["pygments", "pytest (>=6,!=8.1.*)"] +type = ["pytest-mypy"] [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.4" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -604,13 +673,13 @@ requests = ">=2.31.0,<3.0.0" [[package]] name = "jsonschema" -version = "4.19.1" +version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, - {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] [package.dependencies] @@ -621,116 +690,126 @@ rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" -version = "2023.7.1" +version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, - {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, ] [package.dependencies] -referencing = ">=0.28.0" +referencing = ">=0.31.0" [[package]] name = "markdown2" -version = "2.4.10" +version = "2.5.1" description = "A fast and complete Python implementation of Markdown" optional = false -python-versions = ">=3.5, <4" +python-versions = "<4,>=3.8" files = [ - {file = "markdown2-2.4.10-py2.py3-none-any.whl", hash = "sha256:e6105800483783831f5dc54f827aa5b44eb137ecef5a70293d8ecfbb4109ecc6"}, - {file = "markdown2-2.4.10.tar.gz", hash = "sha256:cdba126d90dc3aef6f4070ac342f974d63f415678959329cc7909f96cc235d72"}, + {file = "markdown2-2.5.1-py2.py3-none-any.whl", hash = "sha256:190ae60a4bd0425c60c863bede18a9f3d45b1cbf3fbc9f40b4fac336ff2c520b"}, + {file = "markdown2-2.5.1.tar.gz", hash = "sha256:12fc04ea5a87f7bb4b65acf5bf3af1183b20838cc7d543b74c92ec7eea4bbc74"}, ] [package.extras] -all = ["pygments (>=2.7.3)", 
"wavedrom"] +all = ["latex2mathml", "pygments (>=2.7.3)", "wavedrom"] code-syntax-highlighting = ["pygments (>=2.7.3)"] +latex = ["latex2mathml"] wavedrom = ["wavedrom"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = 
"MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = 
"sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] name = "marshmallow" -version = "3.20.1" +version = "3.23.1" description = "A lightweight library for 
converting complex datatypes to and from native Python datatypes." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"}, - {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"}, + {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, + {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"] -tests = ["pytest", "pytz", "simplejson"] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "simplejson"] [[package]] name = "mccabe" @@ -754,50 +833,68 @@ files = [ {file = "mimesis-6.1.1.tar.gz", hash = "sha256:044ac378c61db0e06832ff722548fd6e604881d36bc938002e0bd5b85eeb6a98"}, ] +[[package]] +name = "more-itertools" +version = "10.5.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.8" +files = [ + {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"}, + {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"}, +] + [[package]] name = "mypy" -version = "1.6.1" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, - {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, - {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, - {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, - {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, - {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, - {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, - {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, - {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, - {file = 
"mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, - {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, - {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, - {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, - {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, - {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, - {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, - {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, - {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, - {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, - {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, - {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, - {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, - {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, - {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, - {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, - {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, - {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = 
"mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -813,169 +910,297 @@ files = [ [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[package.dependencies] -setuptools = "*" +[[package]] +name = "numpy" +version = "2.0.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, + {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, + {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, + {file = 
"numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, + {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, + {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, + {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, + {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, + {file = 
"numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, + {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, + {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, + {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, +] [[package]] name = "numpy" -version = "1.26.1" +version = "2.1.3" description = "Fundamental package for array computing in Python" optional = false -python-versions = "<3.13,>=3.9" -files = [ - {file = "numpy-1.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82e871307a6331b5f09efda3c22e03c095d957f04bf6bc1804f30048d0e5e7af"}, - {file = "numpy-1.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdd9ec98f0063d93baeb01aad472a1a0840dee302842a2746a7a8e92968f9575"}, - {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d78f269e0c4fd365fc2992c00353e4530d274ba68f15e968d8bc3c69ce5f5244"}, - {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ab9163ca8aeb7fd32fe93866490654d2f7dda4e61bc6297bf72ce07fdc02f67"}, - {file = "numpy-1.26.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:78ca54b2f9daffa5f323f34cdf21e1d9779a54073f0018a3094ab907938331a2"}, - {file = "numpy-1.26.1-cp310-cp310-win32.whl", hash = "sha256:d1cfc92db6af1fd37a7bb58e55c8383b4aa1ba23d012bdbba26b4bcca45ac297"}, - {file = "numpy-1.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:d2984cb6caaf05294b8466966627e80bf6c7afd273279077679cb010acb0e5ab"}, - {file = "numpy-1.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd7837b2b734ca72959a1caf3309457a318c934abef7a43a14bb984e574bbb9a"}, - {file = "numpy-1.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c59c046c31a43310ad0199d6299e59f57a289e22f0f36951ced1c9eac3665b9"}, - {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d58e8c51a7cf43090d124d5073bc29ab2755822181fcad978b12e144e5e5a4b3"}, - {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6081aed64714a18c72b168a9276095ef9155dd7888b9e74b5987808f0dd0a974"}, - {file = "numpy-1.26.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:97e5d6a9f0702c2863aaabf19f0d1b6c2628fbe476438ce0b5ce06e83085064c"}, - {file = "numpy-1.26.1-cp311-cp311-win32.whl", hash = "sha256:b9d45d1dbb9de84894cc50efece5b09939752a2d75aab3a8b0cef6f3a35ecd6b"}, - {file = "numpy-1.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:3649d566e2fc067597125428db15d60eb42a4e0897fc48d28cb75dc2e0454e53"}, - {file = "numpy-1.26.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d1bd82d539607951cac963388534da3b7ea0e18b149a53cf883d8f699178c0f"}, - {file = 
"numpy-1.26.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd5ced4e5a96dac6725daeb5242a35494243f2239244fad10a90ce58b071d24"}, - {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03fb25610ef560a6201ff06df4f8105292ba56e7cdd196ea350d123fc32e24e"}, - {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcfaf015b79d1f9f9c9fd0731a907407dc3e45769262d657d754c3a028586124"}, - {file = "numpy-1.26.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e509cbc488c735b43b5ffea175235cec24bbc57b227ef1acc691725beb230d1c"}, - {file = "numpy-1.26.1-cp312-cp312-win32.whl", hash = "sha256:af22f3d8e228d84d1c0c44c1fbdeb80f97a15a0abe4f080960393a00db733b66"}, - {file = "numpy-1.26.1-cp312-cp312-win_amd64.whl", hash = "sha256:9f42284ebf91bdf32fafac29d29d4c07e5e9d1af862ea73686581773ef9e73a7"}, - {file = "numpy-1.26.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb894accfd16b867d8643fc2ba6c8617c78ba2828051e9a69511644ce86ce83e"}, - {file = "numpy-1.26.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e44ccb93f30c75dfc0c3aa3ce38f33486a75ec9abadabd4e59f114994a9c4617"}, - {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9696aa2e35cc41e398a6d42d147cf326f8f9d81befcb399bc1ed7ffea339b64e"}, - {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5b411040beead47a228bde3b2241100454a6abde9df139ed087bd73fc0a4908"}, - {file = "numpy-1.26.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e11668d6f756ca5ef534b5be8653d16c5352cbb210a5c2a79ff288e937010d5"}, - {file = "numpy-1.26.1-cp39-cp39-win32.whl", hash = "sha256:d1d2c6b7dd618c41e202c59c1413ef9b2c8e8a15f5039e344af64195459e3104"}, - {file = "numpy-1.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:59227c981d43425ca5e5c01094d59eb14e8772ce6975d4b2fc1e106a833d5ae2"}, - {file = "numpy-1.26.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06934e1a22c54636a059215d6da99e23286424f316fddd979f5071093b648668"}, - {file = "numpy-1.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76ff661a867d9272cd2a99eed002470f46dbe0943a5ffd140f49be84f68ffc42"}, - {file = "numpy-1.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6965888d65d2848e8768824ca8288db0a81263c1efccec881cb35a0d805fcd2f"}, - {file = "numpy-1.26.1.tar.gz", hash = "sha256:c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe"}, +python-versions = ">=3.10" +files = [ + {file = "numpy-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c894b4305373b9c5576d7a12b473702afdf48ce5369c074ba304cc5ad8730dff"}, + {file = "numpy-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b47fbb433d3260adcd51eb54f92a2ffbc90a4595f8970ee00e064c644ac788f5"}, + {file = "numpy-2.1.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:825656d0743699c529c5943554d223c021ff0494ff1442152ce887ef4f7561a1"}, + {file = "numpy-2.1.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a4825252fcc430a182ac4dee5a505053d262c807f8a924603d411f6718b88fd"}, + {file = "numpy-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e711e02f49e176a01d0349d82cb5f05ba4db7d5e7e0defd026328e5cfb3226d3"}, + {file = "numpy-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78574ac2d1a4a02421f25da9559850d59457bac82f2b8d7a44fe83a64f770098"}, + {file = "numpy-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:c7662f0e3673fe4e832fe07b65c50342ea27d989f92c80355658c7f888fcc83c"}, + {file = "numpy-2.1.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fa2d1337dc61c8dc417fbccf20f6d1e139896a30721b7f1e832b2bb6ef4eb6c4"}, + {file = "numpy-2.1.3-cp310-cp310-win32.whl", hash = "sha256:72dcc4a35a8515d83e76b58fdf8113a5c969ccd505c8a946759b24e3182d1f23"}, + {file = "numpy-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:ecc76a9ba2911d8d37ac01de72834d8849e55473457558e12995f4cd53e778e0"}, + {file = "numpy-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4d1167c53b93f1f5d8a139a742b3c6f4d429b54e74e6b57d0eff40045187b15d"}, + {file = "numpy-2.1.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c80e4a09b3d95b4e1cac08643f1152fa71a0a821a2d4277334c88d54b2219a41"}, + {file = "numpy-2.1.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:576a1c1d25e9e02ed7fa5477f30a127fe56debd53b8d2c89d5578f9857d03ca9"}, + {file = "numpy-2.1.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:973faafebaae4c0aaa1a1ca1ce02434554d67e628b8d805e61f874b84e136b09"}, + {file = "numpy-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:762479be47a4863e261a840e8e01608d124ee1361e48b96916f38b119cfda04a"}, + {file = "numpy-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6f24b3d1ecc1eebfbf5d6051faa49af40b03be1aaa781ebdadcbc090b4539b"}, + {file = "numpy-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:17ee83a1f4fef3c94d16dc1802b998668b5419362c8a4f4e8a491de1b41cc3ee"}, + {file = "numpy-2.1.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15cb89f39fa6d0bdfb600ea24b250e5f1a3df23f901f51c8debaa6a5d122b2f0"}, + {file = "numpy-2.1.3-cp311-cp311-win32.whl", hash = "sha256:d9beb777a78c331580705326d2367488d5bc473b49a9bc3036c154832520aca9"}, + {file = "numpy-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:d89dd2b6da69c4fff5e39c28a382199ddedc3a5be5390115608345dec660b9e2"}, + {file = "numpy-2.1.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f55ba01150f52b1027829b50d70ef1dafd9821ea82905b63936668403c3b471e"}, + {file = "numpy-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13138eadd4f4da03074851a698ffa7e405f41a0845a6b1ad135b81596e4e9958"}, + {file = "numpy-2.1.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a6b46587b14b888e95e4a24d7b13ae91fa22386c199ee7b418f449032b2fa3b8"}, + {file = "numpy-2.1.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:0fa14563cc46422e99daef53d725d0c326e99e468a9320a240affffe87852564"}, + {file = "numpy-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8637dcd2caa676e475503d1f8fdb327bc495554e10838019651b76d17b98e512"}, + {file = "numpy-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2312b2aa89e1f43ecea6da6ea9a810d06aae08321609d8dc0d0eda6d946a541b"}, + {file = "numpy-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a38c19106902bb19351b83802531fea19dee18e5b37b36454f27f11ff956f7fc"}, + {file = "numpy-2.1.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02135ade8b8a84011cbb67dc44e07c58f28575cf9ecf8ab304e51c05528c19f0"}, + {file = "numpy-2.1.3-cp312-cp312-win32.whl", hash = "sha256:e6988e90fcf617da2b5c78902fe8e668361b43b4fe26dbf2d7b0f8034d4cafb9"}, + {file = "numpy-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:0d30c543f02e84e92c4b1f415b7c6b5326cbe45ee7882b6b77db7195fb971e3a"}, + {file = "numpy-2.1.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96fe52fcdb9345b7cd82ecd34547fca4321f7656d500eca497eb7ea5a926692f"}, + 
{file = "numpy-2.1.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f653490b33e9c3a4c1c01d41bc2aef08f9475af51146e4a7710c450cf9761598"}, + {file = "numpy-2.1.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dc258a761a16daa791081d026f0ed4399b582712e6fc887a95af09df10c5ca57"}, + {file = "numpy-2.1.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:016d0f6f5e77b0f0d45d77387ffa4bb89816b57c835580c3ce8e099ef830befe"}, + {file = "numpy-2.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c181ba05ce8299c7aa3125c27b9c2167bca4a4445b7ce73d5febc411ca692e43"}, + {file = "numpy-2.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5641516794ca9e5f8a4d17bb45446998c6554704d888f86df9b200e66bdcce56"}, + {file = "numpy-2.1.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ea4dedd6e394a9c180b33c2c872b92f7ce0f8e7ad93e9585312b0c5a04777a4a"}, + {file = "numpy-2.1.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0df3635b9c8ef48bd3be5f862cf71b0a4716fa0e702155c45067c6b711ddcef"}, + {file = "numpy-2.1.3-cp313-cp313-win32.whl", hash = "sha256:50ca6aba6e163363f132b5c101ba078b8cbd3fa92c7865fd7d4d62d9779ac29f"}, + {file = "numpy-2.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:747641635d3d44bcb380d950679462fae44f54b131be347d5ec2bce47d3df9ed"}, + {file = "numpy-2.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:996bb9399059c5b82f76b53ff8bb686069c05acc94656bb259b1d63d04a9506f"}, + {file = "numpy-2.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:45966d859916ad02b779706bb43b954281db43e185015df6eb3323120188f9e4"}, + {file = "numpy-2.1.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:baed7e8d7481bfe0874b566850cb0b85243e982388b7b23348c6db2ee2b2ae8e"}, + {file = "numpy-2.1.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f7f672a3388133335589cfca93ed468509cb7b93ba3105fce780d04a6576a0"}, + {file = "numpy-2.1.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7aac50327da5d208db2eec22eb11e491e3fe13d22653dce51b0f4109101b408"}, + {file = "numpy-2.1.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4394bc0dbd074b7f9b52024832d16e019decebf86caf909d94f6b3f77a8ee3b6"}, + {file = "numpy-2.1.3-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:50d18c4358a0a8a53f12a8ba9d772ab2d460321e6a93d6064fc22443d189853f"}, + {file = "numpy-2.1.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:14e253bd43fc6b37af4921b10f6add6925878a42a0c5fe83daee390bca80bc17"}, + {file = "numpy-2.1.3-cp313-cp313t-win32.whl", hash = "sha256:08788d27a5fd867a663f6fc753fd7c3ad7e92747efc73c53bca2f19f8bc06f48"}, + {file = "numpy-2.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2564fbdf2b99b3f815f2107c1bbc93e2de8ee655a69c261363a1172a79a257d4"}, + {file = "numpy-2.1.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4f2015dfe437dfebbfce7c85c7b53d81ba49e71ba7eadbf1df40c915af75979f"}, + {file = "numpy-2.1.3-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3522b0dfe983a575e6a9ab3a4a4dfe156c3e428468ff08ce582b9bb6bd1d71d4"}, + {file = "numpy-2.1.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c006b607a865b07cd981ccb218a04fc86b600411d83d6fc261357f1c0966755d"}, + {file = "numpy-2.1.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e14e26956e6f1696070788252dcdff11b4aca4c3e8bd166e0df1bb8f315a67cb"}, + {file = "numpy-2.1.3.tar.gz", hash = "sha256:aa08e04e08aaf974d4458def539dece0d28146d866a39da5639596f4921fd761"}, ] [[package]] name = "opendp" 
-version = "0.7.0" +version = "0.10.0" description = "Python bindings for the OpenDP Library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "opendp-0.7.0-py3-none-any.whl", hash = "sha256:e803a78854b705915937013e824925fe04ce5666cd922f1d38c9834b7820444c"}, + {file = "opendp-0.10.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:73b1d9e200d44df1e4f6ca84659080bbe00cb50c2cdd96bfae59dcb7393469d1"}, + {file = "opendp-0.10.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:d75d1bd67ec031e8427e40d465c71f4ae80fe0ed4e9619c8881d54b52dfa8969"}, + {file = "opendp-0.10.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffbebb654b40e4b9df725fb47088786b54786046c9f47244a5286a92895fe975"}, + {file = "opendp-0.10.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1907ade70eea733d65e418548ef326986fe7b8466394202872bb6f7c86e452be"}, + {file = "opendp-0.10.0-cp39-abi3-win32.whl", hash = "sha256:b27090536a512e0df266f92d3edd6ce959cc0c3a63d059ce39ec6e0a532eba80"}, + {file = "opendp-0.10.0-cp39-abi3-win_amd64.whl", hash = "sha256:cdf774428679c3191ce421b0729d513abb47bd2247da97484b9bf155b9bf6aa2"}, + {file = "opendp-0.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0090fd6543f52dffd5a6bc9e78695543f74c0b1dfd0584d172a577c4d40c3752"}, + {file = "opendp-0.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21c2a82c101a6996f6bd27c0e573bca8d5d9c7e5db85456c3f3e713400cc60f0"}, + {file = "opendp-0.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f35fefa8f68fd3d28208c56fdfe409f0454bcb1dddb4a77d511e215be0bc552"}, + {file = "opendp-0.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f4a36903e1be544f339a5f083dfccd195a278acf2bae044fb3d9884c3a4f340f"}, + {file = "opendp-0.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0de654a3b81ba8d0266ef3c7e827d477caf5fdaf83397fd859f334ff4d2be774"}, + {file = "opendp-0.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6ab7ed35b778ddfa6008323527ae8b2c05c400c3023f881dd1f7598ec3b9691"}, + {file = "opendp-0.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34769cd8e6b8dfc11a8411bae240b07c3333e344f14cef5548bbfe48fa7bd9c1"}, + {file = "opendp-0.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4a260206ad6204c3e6149448597fe775e9202a48cd3ead2f0a3a1962280412d3"}, + {file = "opendp-0.10.0-py3-none-manylinux_2_24_aarch64.whl", hash = "sha256:f5ed014e32885fbb6e7d2e3316eeb48ebe941ae00d0d0a8bd2a424f4d2d9993a"}, + {file = "opendp-0.10.0.tar.gz", hash = "sha256:80598450c9be450ae6950c7f5fedfd2482ee4e419f511db8ba7f76d7db6d66b1"}, ] +[package.extras] +numpy = ["numpy (>=1.17,<2.0)", "randomgen"] +polars = ["numpy (>=1.17,<2.0)", "polars (==0.20.16)", "pyarrow", "randomgen", "scikit-learn"] +scikit-learn = ["numpy (>=1.17,<2.0)", "randomgen", "scikit-learn"] + [[package]] name = "packaging" -version = "23.2" +version = "24.2" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = 
"sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "pandas" -version = "2.1.1" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58d997dbee0d4b64f3cb881a24f918b5f25dd64ddf31f467bb9b67ae4c63a1e4"}, - {file = "pandas-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02304e11582c5d090e5a52aec726f31fe3f42895d6bfc1f28738f9b64b6f0614"}, - {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffa8f0966de2c22de408d0e322db2faed6f6e74265aa0856f3824813cf124363"}, - {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1f84c144dee086fe4f04a472b5cd51e680f061adf75c1ae4fc3a9275560f8f4"}, - {file = "pandas-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ce97667d06d69396d72be074f0556698c7f662029322027c226fd7a26965cb"}, - {file = "pandas-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:4c3f32fd7c4dccd035f71734df39231ac1a6ff95e8bdab8d891167197b7018d2"}, - {file = "pandas-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e2959720b70e106bb1d8b6eadd8ecd7c8e99ccdbe03ee03260877184bb2877d"}, - {file = "pandas-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25e8474a8eb258e391e30c288eecec565bfed3e026f312b0cbd709a63906b6f8"}, - {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8bd1685556f3374520466998929bade3076aeae77c3e67ada5ed2b90b4de7f0"}, - {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc3657869c7902810f32bd072f0740487f9e030c1a3ab03e0af093db35a9d14e"}, - {file = "pandas-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:05674536bd477af36aa2effd4ec8f71b92234ce0cc174de34fd21e2ee99adbc2"}, - {file = "pandas-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:b407381258a667df49d58a1b637be33e514b07f9285feb27769cedb3ab3d0b3a"}, - {file = "pandas-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c747793c4e9dcece7bb20156179529898abf505fe32cb40c4052107a3c620b49"}, - {file = "pandas-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bcad1e6fb34b727b016775bea407311f7721db87e5b409e6542f4546a4951ea"}, - {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5ec7740f9ccb90aec64edd71434711f58ee0ea7f5ed4ac48be11cfa9abf7317"}, - {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29deb61de5a8a93bdd033df328441a79fcf8dd3c12d5ed0b41a395eef9cd76f0"}, - {file = "pandas-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f99bebf19b7e03cf80a4e770a3e65eee9dd4e2679039f542d7c1ace7b7b1daa"}, - {file = "pandas-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:84e7e910096416adec68075dc87b986ff202920fb8704e6d9c8c9897fe7332d6"}, - {file = "pandas-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366da7b0e540d1b908886d4feb3d951f2f1e572e655c1160f5fde28ad4abb750"}, - {file = "pandas-2.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e50e72b667415a816ac27dfcfe686dc5a0b02202e06196b943d54c4f9c7693e"}, - {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1ab6a25da197f03ebe6d8fa17273126120874386b4ac11c1d687df288542dd"}, - {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a0dbfea0dd3901ad4ce2306575c54348d98499c95be01b8d885a2737fe4d7a98"}, - {file = "pandas-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0489b0e6aa3d907e909aef92975edae89b1ee1654db5eafb9be633b0124abe97"}, - {file = "pandas-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:4cdb0fab0400c2cb46dafcf1a0fe084c8bb2480a1fa8d81e19d15e12e6d4ded2"}, - {file = "pandas-2.1.1.tar.gz", hash = "sha256:fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] -numpy = {version = ">=1.22.4", markers = "python_version < \"3.11\""} +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] python-dateutil = ">=2.8.2" pytz = 
">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] -aws = ["s3fs (>=2022.05.0)"] -clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] -compression = ["zstandard (>=0.17.0)"] -computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2022.05.0)"] -gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] -hdf5 = ["tables (>=3.7.0)"] -html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] -mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] -spss = ["pyreadstat (>=1.1.5)"] -sql-other = ["SQLAlchemy (>=1.4.36)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.8.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow 
(>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "parsy" +version = "2.1" +description = "Easy-to-use parser combinators, for parsing in pure Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "parsy-2.1-py3-none-any.whl", hash = "sha256:8f18e7b11985e7802e7e3ecbd8291c6ca243d29820b1186e4c84605db4efffa0"}, + {file = "parsy-2.1.tar.gz", hash = "sha256:fd5dd18d7b0b61f8275ee88665f430a20c02cf5a82d88557f35330530186d7ac"}, +] [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "platformdirs" -version = "3.11.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pockets" @@ -993,13 +1218,13 @@ six = ">=1.5.2" [[package]] name = "pre-commit" -version = "3.5.0" +version = "3.8.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, - {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, ] [package.dependencies] @@ -1011,125 +1236,130 @@ virtualenv = ">=20.10.0" [[package]] name = "psycopg2-binary" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = 
"psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, + {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = 
"psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + 
{file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.19" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash 
= "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", 
hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-1.10.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a415b9e95fa602b10808113967f72b2da8722061265d6af69268c111c254832d"}, + {file = "pydantic-1.10.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:11965f421f7eb026439d4eb7464e9182fe6d69c3d4d416e464a4485d1ba61ab6"}, + {file = "pydantic-1.10.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bb81fcfc6d5bff62cd786cbd87480a11d23f16d5376ad2e057c02b3b44df96"}, + {file = "pydantic-1.10.19-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83ee8c9916689f8e6e7d90161e6663ac876be2efd32f61fdcfa3a15e87d4e413"}, + {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0399094464ae7f28482de22383e667625e38e1516d6b213176df1acdd0c477ea"}, + {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8b2cf5e26da84f2d2dee3f60a3f1782adedcee785567a19b68d0af7e1534bd1f"}, + {file = "pydantic-1.10.19-cp310-cp310-win_amd64.whl", hash = "sha256:1fc8cc264afaf47ae6a9bcbd36c018d0c6b89293835d7fb0e5e1a95898062d59"}, + {file = "pydantic-1.10.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7a8a1dd68bac29f08f0a3147de1885f4dccec35d4ea926e6e637fac03cdb4b3"}, + {file = "pydantic-1.10.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07d00ca5ef0de65dd274005433ce2bb623730271d495a7d190a91c19c5679d34"}, + {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad57004e5d73aee36f1e25e4e73a4bc853b473a1c30f652dc8d86b0a987ffce3"}, + {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dce355fe7ae53e3090f7f5fa242423c3a7b53260747aa398b4b3aaf8b25f41c3"}, + {file = "pydantic-1.10.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0d32227ea9a3bf537a2273fd2fdb6d64ab4d9b83acd9e4e09310a777baaabb98"}, + {file = "pydantic-1.10.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e351df83d1c9cffa53d4e779009a093be70f1d5c6bb7068584086f6a19042526"}, + {file = "pydantic-1.10.19-cp311-cp311-win_amd64.whl", hash = "sha256:d8d72553d2f3f57ce547de4fa7dc8e3859927784ab2c88343f1fc1360ff17a08"}, + {file = "pydantic-1.10.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d5b5b7c6bafaef90cbb7dafcb225b763edd71d9e22489647ee7df49d6d341890"}, + {file = "pydantic-1.10.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:570ad0aeaf98b5e33ff41af75aba2ef6604ee25ce0431ecd734a28e74a208555"}, + {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0890fbd7fec9e151c7512941243d830b2d6076d5df159a2030952d480ab80a4e"}, + {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec5c44e6e9eac5128a9bfd21610df3b8c6b17343285cc185105686888dc81206"}, + {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6eb56074b11a696e0b66c7181da682e88c00e5cebe6570af8013fcae5e63e186"}, + {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d7d48fbc5289efd23982a0d68e973a1f37d49064ccd36d86de4543aff21e086"}, + {file = "pydantic-1.10.19-cp312-cp312-win_amd64.whl", hash = "sha256:fd34012691fbd4e67bdf4accb1f0682342101015b78327eaae3543583fcd451e"}, + {file = 
"pydantic-1.10.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a5d5b877c7d3d9e17399571a8ab042081d22fe6904416a8b20f8af5909e6c8f"}, + {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c46f58ef2df958ed2ea7437a8be0897d5efe9ee480818405338c7da88186fb3"}, + {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d8a38a44bb6a15810084316ed69c854a7c06e0c99c5429f1d664ad52cec353c"}, + {file = "pydantic-1.10.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a82746c6d6e91ca17e75f7f333ed41d70fce93af520a8437821dec3ee52dfb10"}, + {file = "pydantic-1.10.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:566bebdbe6bc0ac593fa0f67d62febbad9f8be5433f686dc56401ba4aab034e3"}, + {file = "pydantic-1.10.19-cp37-cp37m-win_amd64.whl", hash = "sha256:22a1794e01591884741be56c6fba157c4e99dcc9244beb5a87bd4aa54b84ea8b"}, + {file = "pydantic-1.10.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:076c49e24b73d346c45f9282d00dbfc16eef7ae27c970583d499f11110d9e5b0"}, + {file = "pydantic-1.10.19-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d4320510682d5a6c88766b2a286d03b87bd3562bf8d78c73d63bab04b21e7b4"}, + {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e66aa0fa7f8aa9d0a620361834f6eb60d01d3e9cea23ca1a92cda99e6f61dac"}, + {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d216f8d0484d88ab72ab45d699ac669fe031275e3fa6553e3804e69485449fa0"}, + {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f28a81978e936136c44e6a70c65bde7548d87f3807260f73aeffbf76fb94c2f"}, + {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d3449633c207ec3d2d672eedb3edbe753e29bd4e22d2e42a37a2c1406564c20f"}, + {file = "pydantic-1.10.19-cp38-cp38-win_amd64.whl", hash = "sha256:7ea24e8614f541d69ea72759ff635df0e612b7dc9d264d43f51364df310081a3"}, + {file = "pydantic-1.10.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:573254d844f3e64093f72fcd922561d9c5696821ff0900a0db989d8c06ab0c25"}, + {file = "pydantic-1.10.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff09600cebe957ecbb4a27496fe34c1d449e7957ed20a202d5029a71a8af2e35"}, + {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4739c206bfb6bb2bdc78dcd40bfcebb2361add4ceac6d170e741bb914e9eff0f"}, + {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bfb5b378b78229119d66ced6adac2e933c67a0aa1d0a7adffbe432f3ec14ce4"}, + {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7f31742c95e3f9443b8c6fa07c119623e61d76603be9c0d390bcf7e888acabcb"}, + {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6444368b651a14c2ce2fb22145e1496f7ab23cbdb978590d47c8d34a7bc0289"}, + {file = "pydantic-1.10.19-cp39-cp39-win_amd64.whl", hash = "sha256:945407f4d08cd12485757a281fca0e5b41408606228612f421aa4ea1b63a095d"}, + {file = "pydantic-1.10.19-py3-none-any.whl", hash = "sha256:2206a1752d9fac011e95ca83926a269fb0ef5536f7e053966d058316e24d929f"}, + {file = "pydantic-1.10.19.tar.gz", hash = "sha256:fea36c2065b7a1d28c6819cc2e93387b43dd5d3cf5a1e82d8132ee23f36d1f10"}, ] [package.dependencies] @@ -1159,34 +1389,38 @@ toml = ["tomli (>=1.2.3)"] [[package]] name = "pygments" -version = "2.16.1" +version = "2.18.0" description = "Pygments is a 
syntax highlighting package written in Python." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] -plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylint" -version = "3.0.1" +version = "3.3.1" description = "python code static checker" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "pylint-3.0.1-py3-none-any.whl", hash = "sha256:9c90b89e2af7809a1697f6f5f93f1d0e518ac566e2ac4d2af881a69c13ad01ea"}, - {file = "pylint-3.0.1.tar.gz", hash = "sha256:81c6125637be216b4652ae50cc42b9f8208dfb725cdc7e04c48f6902f4dbdf40"}, + {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, + {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, ] [package.dependencies] -astroid = ">=3.0.0,<=3.1.0-dev0" +astroid = ">=3.3.4,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = {version = ">=0.2", markers = "python_version < \"3.11\""} -isort = ">=4.2.5,<6" +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} @@ -1199,13 +1433,13 @@ testutils = ["gitpython (>3)"] [[package]] name = "pymysql" -version = "1.1.0" +version = "1.1.1" description = "Pure Python MySQL Driver" optional = false python-versions = ">=3.7" files = [ - {file = "PyMySQL-1.1.0-py3-none-any.whl", hash = "sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7"}, - {file = "PyMySQL-1.1.0.tar.gz", hash = "sha256:4f13a7df8bf36a51e81dd9f3605fede45a4878fe02f9236349fd82a3f0612f96"}, + {file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"}, + {file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"}, ] [package.extras] @@ -1214,13 +1448,13 @@ rsa = ["cryptography"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = 
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1228,13 +1462,13 @@ six = ">=1.5" [[package]] name = "python-dotenv" -version = "1.0.0" +version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" files = [ - {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, - {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, ] [package.extras] @@ -1242,73 +1476,86 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file 
= "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "referencing" -version = "0.30.2" +version = "0.35.1" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, - {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, + {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, + {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, ] [package.dependencies] @@ -1317,13 +1564,13 @@ rpds-py = ">=0.7.0" [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1351,110 +1598,101 @@ docutils = ">=0.11,<1.0" [[package]] name = "rpds-py" -version = "0.10.6" +version = "0.21.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "rpds_py-0.10.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bdc11f9623870d75692cc33c59804b5a18d7b8a4b79ef0b00b773a27397d1f6"}, - {file = "rpds_py-0.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:26857f0f44f0e791f4a266595a7a09d21f6b589580ee0585f330aaccccb836e3"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7f5e15c953ace2e8dde9824bdab4bec50adb91a5663df08d7d994240ae6fa31"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61fa268da6e2e1cd350739bb61011121fa550aa2545762e3dc02ea177ee4de35"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c48f3fbc3e92c7dd6681a258d22f23adc2eb183c8cb1557d2fcc5a024e80b094"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0503c5b681566e8b722fe8c4c47cce5c7a51f6935d5c7012c4aefe952a35eed"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:734c41f9f57cc28658d98270d3436dba65bed0cfc730d115b290e970150c540d"}, - {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a5d7ed104d158c0042a6a73799cf0eb576dfd5fc1ace9c47996e52320c37cb7c"}, - {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3df0bc35e746cce42579826b89579d13fd27c3d5319a6afca9893a9b784ff1b"}, - {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:73e0a78a9b843b8c2128028864901f55190401ba38aae685350cf69b98d9f7c9"}, - {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ed505ec6305abd2c2c9586a7b04fbd4baf42d4d684a9c12ec6110deefe2a063"}, - {file = "rpds_py-0.10.6-cp310-none-win32.whl", hash = "sha256:d97dd44683802000277bbf142fd9f6b271746b4846d0acaf0cefa6b2eaf2a7ad"}, - {file = "rpds_py-0.10.6-cp310-none-win_amd64.whl", hash = "sha256:b455492cab07107bfe8711e20cd920cc96003e0da3c1f91297235b1603d2aca7"}, - {file = "rpds_py-0.10.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e8cdd52744f680346ff8c1ecdad5f4d11117e1724d4f4e1874f3a67598821069"}, - {file = "rpds_py-0.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66414dafe4326bca200e165c2e789976cab2587ec71beb80f59f4796b786a238"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc435d059f926fdc5b05822b1be4ff2a3a040f3ae0a7bbbe672babb468944722"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7f2219cb72474571974d29a191714d822e58be1eb171f229732bc6fdedf0ac"}, - {file = 
"rpds_py-0.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3953c6926a63f8ea5514644b7afb42659b505ece4183fdaaa8f61d978754349e"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bb2e4826be25e72013916eecd3d30f66fd076110de09f0e750163b416500721"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf347b495b197992efc81a7408e9a83b931b2f056728529956a4d0858608b80"}, - {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:102eac53bb0bf0f9a275b438e6cf6904904908562a1463a6fc3323cf47d7a532"}, - {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40f93086eef235623aa14dbddef1b9fb4b22b99454cb39a8d2e04c994fb9868c"}, - {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e22260a4741a0e7a206e175232867b48a16e0401ef5bce3c67ca5b9705879066"}, - {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4e56860a5af16a0fcfa070a0a20c42fbb2012eed1eb5ceeddcc7f8079214281"}, - {file = "rpds_py-0.10.6-cp311-none-win32.whl", hash = "sha256:0774a46b38e70fdde0c6ded8d6d73115a7c39d7839a164cc833f170bbf539116"}, - {file = "rpds_py-0.10.6-cp311-none-win_amd64.whl", hash = "sha256:4a5ee600477b918ab345209eddafde9f91c0acd931f3776369585a1c55b04c57"}, - {file = "rpds_py-0.10.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:5ee97c683eaface61d38ec9a489e353d36444cdebb128a27fe486a291647aff6"}, - {file = "rpds_py-0.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0713631d6e2d6c316c2f7b9320a34f44abb644fc487b77161d1724d883662e31"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a53f5998b4bbff1cb2e967e66ab2addc67326a274567697379dd1e326bded7"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a555ae3d2e61118a9d3e549737bb4a56ff0cec88a22bd1dfcad5b4e04759175"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:945eb4b6bb8144909b203a88a35e0a03d22b57aefb06c9b26c6e16d72e5eb0f0"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52c215eb46307c25f9fd2771cac8135d14b11a92ae48d17968eda5aa9aaf5071"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1b3cd23d905589cb205710b3988fc8f46d4a198cf12862887b09d7aaa6bf9b9"}, - {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64ccc28683666672d7c166ed465c09cee36e306c156e787acef3c0c62f90da5a"}, - {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:516a611a2de12fbea70c78271e558f725c660ce38e0006f75139ba337d56b1f6"}, - {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9ff93d3aedef11f9c4540cf347f8bb135dd9323a2fc705633d83210d464c579d"}, - {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d858532212f0650be12b6042ff4378dc2efbb7792a286bee4489eaa7ba010586"}, - {file = "rpds_py-0.10.6-cp312-none-win32.whl", hash = "sha256:3c4eff26eddac49d52697a98ea01b0246e44ca82ab09354e94aae8823e8bda02"}, - {file = "rpds_py-0.10.6-cp312-none-win_amd64.whl", hash = "sha256:150eec465dbc9cbca943c8e557a21afdcf9bab8aaabf386c44b794c2f94143d2"}, - {file = "rpds_py-0.10.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:cf693eb4a08eccc1a1b636e4392322582db2a47470d52e824b25eca7a3977b53"}, - {file = 
"rpds_py-0.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4134aa2342f9b2ab6c33d5c172e40f9ef802c61bb9ca30d21782f6e035ed0043"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e782379c2028a3611285a795b89b99a52722946d19fc06f002f8b53e3ea26ea9"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f6da6d842195fddc1cd34c3da8a40f6e99e4a113918faa5e60bf132f917c247"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a9fe992887ac68256c930a2011255bae0bf5ec837475bc6f7edd7c8dfa254e"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b788276a3c114e9f51e257f2a6f544c32c02dab4aa7a5816b96444e3f9ffc336"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa1afc70a02645809c744eefb7d6ee8fef7e2fad170ffdeacca267fd2674f13"}, - {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bddd4f91eede9ca5275e70479ed3656e76c8cdaaa1b354e544cbcf94c6fc8ac4"}, - {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:775049dfa63fb58293990fc59473e659fcafd953bba1d00fc5f0631a8fd61977"}, - {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c6c45a2d2b68c51fe3d9352733fe048291e483376c94f7723458cfd7b473136b"}, - {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0699ab6b8c98df998c3eacf51a3b25864ca93dab157abe358af46dc95ecd9801"}, - {file = "rpds_py-0.10.6-cp38-none-win32.whl", hash = "sha256:ebdab79f42c5961682654b851f3f0fc68e6cc7cd8727c2ac4ffff955154123c1"}, - {file = "rpds_py-0.10.6-cp38-none-win_amd64.whl", hash = "sha256:24656dc36f866c33856baa3ab309da0b6a60f37d25d14be916bd3e79d9f3afcf"}, - {file = "rpds_py-0.10.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:0898173249141ee99ffcd45e3829abe7bcee47d941af7434ccbf97717df020e5"}, - {file = "rpds_py-0.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e9184fa6c52a74a5521e3e87badbf9692549c0fcced47443585876fcc47e469"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5752b761902cd15073a527b51de76bbae63d938dc7c5c4ad1e7d8df10e765138"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99a57006b4ec39dbfb3ed67e5b27192792ffb0553206a107e4aadb39c5004cd5"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09586f51a215d17efdb3a5f090d7cbf1633b7f3708f60a044757a5d48a83b393"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e225a6a14ecf44499aadea165299092ab0cba918bb9ccd9304eab1138844490b"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2039f8d545f20c4e52713eea51a275e62153ee96c8035a32b2abb772b6fc9e5"}, - {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34ad87a831940521d462ac11f1774edf867c34172010f5390b2f06b85dcc6014"}, - {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dcdc88b6b01015da066da3fb76545e8bb9a6880a5ebf89e0f0b2e3ca557b3ab7"}, - {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:25860ed5c4e7f5e10c496ea78af46ae8d8468e0be745bd233bab9ca99bfd2647"}, - {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:7854a207ef77319ec457c1eb79c361b48807d252d94348305db4f4b62f40f7f3"}, - {file = "rpds_py-0.10.6-cp39-none-win32.whl", hash = "sha256:e6fcc026a3f27c1282c7ed24b7fcac82cdd70a0e84cc848c0841a3ab1e3dea2d"}, - {file = "rpds_py-0.10.6-cp39-none-win_amd64.whl", hash = "sha256:e98c4c07ee4c4b3acf787e91b27688409d918212dfd34c872201273fdd5a0e18"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:68fe9199184c18d997d2e4293b34327c0009a78599ce703e15cd9a0f47349bba"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3339eca941568ed52d9ad0f1b8eb9fe0958fa245381747cecf2e9a78a5539c42"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a360cfd0881d36c6dc271992ce1eda65dba5e9368575663de993eeb4523d895f"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:031f76fc87644a234883b51145e43985aa2d0c19b063e91d44379cd2786144f8"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f36a9d751f86455dc5278517e8b65580eeee37d61606183897f122c9e51cef3"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:052a832078943d2b2627aea0d19381f607fe331cc0eb5df01991268253af8417"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023574366002bf1bd751ebaf3e580aef4a468b3d3c216d2f3f7e16fdabd885ed"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:defa2c0c68734f4a82028c26bcc85e6b92cced99866af118cd6a89b734ad8e0d"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879fb24304ead6b62dbe5034e7b644b71def53c70e19363f3c3be2705c17a3b4"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:53c43e10d398e365da2d4cc0bcaf0854b79b4c50ee9689652cdc72948e86f487"}, - {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3777cc9dea0e6c464e4b24760664bd8831738cc582c1d8aacf1c3f546bef3f65"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:40578a6469e5d1df71b006936ce95804edb5df47b520c69cf5af264d462f2cbb"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:cf71343646756a072b85f228d35b1d7407da1669a3de3cf47f8bbafe0c8183a4"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10f32b53f424fc75ff7b713b2edb286fdbfc94bf16317890260a81c2c00385dc"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:81de24a1c51cfb32e1fbf018ab0bdbc79c04c035986526f76c33e3f9e0f3356c"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac17044876e64a8ea20ab132080ddc73b895b4abe9976e263b0e30ee5be7b9c2"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e8a78bd4879bff82daef48c14d5d4057f6856149094848c3ed0ecaf49f5aec2"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78ca33811e1d95cac8c2e49cb86c0fb71f4d8409d8cbea0cb495b6dbddb30a55"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c63c3ef43f0b3fb00571cff6c3967cc261c0ebd14a0a134a12e83bdb8f49f21f"}, - {file = 
"rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:7fde6d0e00b2fd0dbbb40c0eeec463ef147819f23725eda58105ba9ca48744f4"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:79edd779cfc46b2e15b0830eecd8b4b93f1a96649bcb502453df471a54ce7977"}, - {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9164ec8010327ab9af931d7ccd12ab8d8b5dc2f4c6a16cbdd9d087861eaaefa1"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d29ddefeab1791e3c751e0189d5f4b3dbc0bbe033b06e9c333dca1f99e1d523e"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:30adb75ecd7c2a52f5e76af50644b3e0b5ba036321c390b8e7ec1bb2a16dd43c"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd609fafdcdde6e67a139898196698af37438b035b25ad63704fd9097d9a3482"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6eef672de005736a6efd565577101277db6057f65640a813de6c2707dc69f396"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cf4393c7b41abbf07c88eb83e8af5013606b1cdb7f6bc96b1b3536b53a574b8"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad857f42831e5b8d41a32437f88d86ead6c191455a3499c4b6d15e007936d4cf"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7360573f1e046cb3b0dceeb8864025aa78d98be4bb69f067ec1c40a9e2d9df"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d08f63561c8a695afec4975fae445245386d645e3e446e6f260e81663bfd2e38"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:f0f17f2ce0f3529177a5fff5525204fad7b43dd437d017dd0317f2746773443d"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:442626328600bde1d09dc3bb00434f5374948838ce75c41a52152615689f9403"}, - {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e9616f5bd2595f7f4a04b67039d890348ab826e943a9bfdbe4938d0eba606971"}, - {file = "rpds_py-0.10.6.tar.gz", hash = "sha256:4ce5a708d65a8dbf3748d2474b580d606b1b9f91b5c6ab2a316e0b0cf7a4ba50"}, + {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"}, + {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"}, + {file = 
"rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"}, + {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"}, + {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"}, + {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"}, + {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"}, + {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"}, + {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"}, + {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"}, + {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"}, + {file = 
"rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"}, + {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"}, + {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"}, + {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"}, + {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"}, + {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"}, + {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"}, + {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"}, + {file = 
"rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"}, + {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"}, + {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"}, + {file = 
"rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"}, + {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"}, ] [[package]] @@ -1480,22 +1718,6 @@ sphinx = ["sphinx (>=4.0,<6.0)"] testing = ["coverage-conditional-plugin (>=0.5)", "coverage[toml] (>=6.0)", "pytest (>=6.0)", "pytest-cov (>=3.0)", "pytest-mock (>=3.7)", "pytest-randomly (>=3.0)", "pytest-sugar (>=0.9.5)"] toml = ["tomli (>=2.0,<3.0)"] -[[package]] -name = "setuptools" -version = "68.2.2" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", 
"jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -1509,19 +1731,19 @@ files = [ [[package]] name = "smartnoise-sql" -version = "1.0.1" +version = "1.0.4" description = "Differentially Private SQL Queries" optional = false -python-versions = ">=3.7,<3.12" +python-versions = "<3.13,>=3.8" files = [ - {file = "smartnoise-sql-1.0.1.tar.gz", hash = "sha256:c5f426be816d3deae45d2829ab3b0fee7796f42fbe165ce983f70e6726d7a889"}, - {file = "smartnoise_sql-1.0.1-py3-none-any.whl", hash = "sha256:2694aea4a0c5dd8e32ce973e395902e5a91e29c29d2052f3f0fd07e615673c3c"}, + {file = "smartnoise-sql-1.0.4.tar.gz", hash = "sha256:93b5265df51e0e43388613a2ffea5a52b8d3772908f3ac78bd8bb2c7d121f01f"}, + {file = "smartnoise_sql-1.0.4-py3-none-any.whl", hash = "sha256:9dffc020b9e96ccd99ac746933e9a9ff07222505836f6ea21f3f0dca02917a7f"}, ] [package.dependencies] antlr4-python3-runtime = "4.9.3" graphviz = ">=0.17,<0.18" -opendp = ">=0.7.0,<0.8.0" +opendp = ">=0.8.0,<0.11.0" pandas = ">=2.0.1,<3.0.0" PyYAML = ">=6.0.1,<7.0.0" sqlalchemy = ">=2.0.0,<3.0.0" @@ -1593,56 +1815,50 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.7" +version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, - {file = "sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.5" +version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, - {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] -lint = ["docutils-stubs", "flake8", 
"mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.4" +version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, - {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] @@ -1690,137 +1906,147 @@ six = ">=1.5.2" [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.6" +version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, - {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.9" +version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, - {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sqlacodegen" -version = "3.0.0rc3" +version = "0.0.post331" description = "Automatic model code generator for SQLAlchemy" optional = false -python-versions = ">=3.7" -files = [ - {file = "sqlacodegen-3.0.0rc3-py3-none-any.whl", hash = 
"sha256:e83089ebe9d4fc2fc7d2b0d2c265822d6cc4b5e84d3ce975338e136f337267af"}, - {file = "sqlacodegen-3.0.0rc3.tar.gz", hash = "sha256:dbc05fd56e0622abd4f76817c935ca84ae360c538279678263b5b75000b81ccc"}, -] +python-versions = ">=3.8" +files = [] +develop = false [package.dependencies] importlib-metadata = {version = "*", markers = "python_version < \"3.10\""} inflect = ">=4.0.0" -SQLAlchemy = ">=1.4.36" +SQLAlchemy = ">=2.0.29" [package.extras] citext = ["sqlalchemy-citext (>=1.7.0)"] geoalchemy2 = ["geoalchemy2 (>=0.11.1)"] -sqlmodel = ["sqlmodel"] -test = ["mysql-connector-python", "psycopg2-binary", "pytest", "pytest-cov"] +pgvector = ["pgvector (>=0.2.4)"] +sqlmodel = ["sqlmodel (>=0.0.12)"] +test = ["coverage (>=7)", "mysql-connector-python", "psycopg2-binary", "pytest (>=7.4)"] + +[package.source] +type = "git" +url = "https://github.com/amacfie/sqlacodegen" +reference = "domain_check_textclause" +resolved_reference = "073c2e7105da79e53163c10d20c8ea7001ca7b66" [[package]] name = "sqlalchemy" -version = "2.0.22" +version = "2.0.36" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = "sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f6ff392b27a743c1ad346d215655503cec64405d3b694228b3454878bf21590"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f776c2c30f0e5f4db45c3ee11a5f2a8d9de68e81eb73ec4237de1e32e04ae81c"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f1792d20d2f4e875ce7a113f43c3561ad12b34ff796b84002a256f37ce9437"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80eeb5189d7d4b1af519fc3f148fe7521b9dfce8f4d6a0820e8f5769b005051"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69fd9e41cf9368afa034e1c81f3570afb96f30fcd2eb1ef29cb4d9371c6eece2"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54bcceaf4eebef07dadfde424f5c26b491e4a64e61761dea9459103ecd6ccc95"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-win32.whl", hash = "sha256:7ee7ccf47aa503033b6afd57efbac6b9e05180f492aeed9fcf70752556f95624"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-win_amd64.whl", hash = 
"sha256:b560f075c151900587ade06706b0c51d04b3277c111151997ea0813455378ae0"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2c9bac865ee06d27a1533471405ad240a6f5d83195eca481f9fc4a71d8b87df8"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:625b72d77ac8ac23da3b1622e2da88c4aedaee14df47c8432bf8f6495e655de2"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39a6e21110204a8c08d40ff56a73ba542ec60bab701c36ce721e7990df49fb9"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a766cb0b468223cafdf63e2d37f14a4757476157927b09300c8c5832d88560"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:505f503763a767556fa4deae5194b2be056b64ecca72ac65224381a0acab7ebe"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = "sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = "sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = 
"sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"}, - {file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"}, - {file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} -typing-extensions = ">=4.2.0" +greenlet = {version = "!=0.4.17", markers = 
"python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)"] +oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -1830,17 +2056,17 @@ postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy-utils" -version = "0.38.3" +version = "0.41.2" description = "Various utility functions for SQLAlchemy." optional = false -python-versions = "~=3.6" +python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-Utils-0.38.3.tar.gz", hash = "sha256:9f9afba607a40455cf703adfa9846584bf26168a0c5a60a70063b70d65051f4d"}, - {file = "SQLAlchemy_Utils-0.38.3-py3-none-any.whl", hash = "sha256:5c13b5d08adfaa85f3d4e8ec09a75136216fad41346980d02974a70a77988bf9"}, + {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, + {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, ] [package.dependencies] @@ -1855,42 +2081,42 @@ intervals = ["intervals (>=0.7.1)"] password = ["passlib (>=1.6,<2.0)"] pendulum = ["pendulum (>=2.0.5)"] phone = ["phonenumbers (>=5.9.2)"] -test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] -test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all 
= ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] timezone = ["python-dateutil"] url = ["furl (>=0.4.1)"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.1.0" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, + {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, ] [[package]] name = "tomlkit" -version = "0.12.1" +version = "0.13.2" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, - {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] name = "tqdm" -version = "4.66.1" +version = "4.67.0" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.67.0-py3-none-any.whl", hash = "sha256:0cd8af9d56911acab92182e88d763100d4788bdf421d251616040cc4d44863be"}, + {file = "tqdm-4.67.0.tar.gz", hash = "sha256:fe5a6f95e6fe0b9755e9469b77b9c3cf850048224ecaa8293d7d2d31f97d869a"}, ] [package.dependencies] @@ -1898,10 +2124,30 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +discord = ["requests"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "typeguard" +version = "4.4.1" +description = "Run-time type checker for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21"}, + {file = "typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +typing-extensions = ">=4.10.0" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", 
"sphinx-rtd-theme (>=1.3.0)"] +test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] + [[package]] name = "typer" version = "0.7.0" @@ -1935,24 +2181,24 @@ files = [ [[package]] name = "types-pyyaml" -version = "6.0.12.12" +version = "6.0.12.20240917" description = "Typing stubs for PyYAML" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, - {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, + {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, + {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, ] [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -1972,71 +2218,75 @@ typing-extensions = ">=3.7.4" [[package]] name = "tzdata" -version = "2023.3" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] name = "urllib3" -version = "2.0.7" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, - {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.24.5" +version = "20.27.1" description = "Virtual Python Environment builder" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, - {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, + {file = "virtualenv-20.27.1-py3-none-any.whl", hash = "sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4"}, + {file = "virtualenv-20.27.1.tar.gz", hash = "sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<4" +platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "zipp" -version = "3.17.0" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover 
= ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [extras] docs = ["sphinx-rtd-theme", "sphinxcontrib-napoleon"] [metadata] lock-version = "2.0" -python-versions = "^3.9,<3.11" -content-hash = "b97fc74877c11a5d476e1f0bdf4c39df5311bfaca992499148a810dc7e21f838" +python-versions = "^3.9,<3.13" +content-hash = "d49ad6e8894cc350891ad8b9eaa2c0d1cae82842ec9227712955fdc8fa821900" diff --git a/pyproject.toml b/pyproject.toml index 3061aebb..8015144a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,10 +16,10 @@ homepage = "https://github.com/alan-turing-institute/sqlsynthgen" documentation = "https://sqlsynthgen.readthedocs.io/en/stable/" [tool.poetry.dependencies] -python = "^3.9,<3.11" +python = "^3.9,<3.13" pydantic = {extras = ["dotenv"], version = "^1.10.2"} psycopg2-binary = "^2.9.5" -sqlalchemy-utils = "^0.38.3" +sqlalchemy-utils = "^0.41.2" mimesis = "^6.1.1" typer = "^0.7.0" pyyaml = "^6.0" @@ -30,11 +30,12 @@ smartnoise-sql = "^1" jinja2 = "^3.1.2" black = "^23.3.0" jsonschema = "^4.17.3" -sqlacodegen = "^3.0.0rc3" -asyncpg = "^0.27.0" -greenlet = "^2.0.2" +sqlacodegen = { git = "https://github.com/amacfie/sqlacodegen", branch = "domain_check_textclause" } +asyncpg = "^0.30.0" +greenlet = "^3.1.1" pymysql = "^1.1.0" pandas = "^2" +parsy = "^2.1" [tool.poetry.group.dev.dependencies] isort = "^5.10.1" diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index f2bb62f6..d9cfd6c2 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -25,9 +25,11 @@ read_config_file, ) +from .serialize_metadata import dict_to_metadata + # pylint: disable=too-many-arguments -ORM_FILENAME: Final[str] = "orm.py" +ORM_FILENAME: Final[str] = "orm.yaml" SSG_FILENAME: Final[str] = "ssg.py" STATS_FILENAME: Final[str] = "src-stats.yaml" @@ -52,6 +54,14 @@ def _require_src_db_dsn(settings: Settings) -> str: return src_dsn +def load_metadata(orm_file_name, config): + tables_config = config.get("tables", {}) + # Remove tables_config..ignore + with open(orm_file_name) as orm_fh: + meta_dict = yaml.load(orm_fh, yaml.Loader) + return dict_to_metadata(meta_dict) + + @app.command() def create_data( orm_file: str = Option(ORM_FILENAME), @@ -79,7 +89,7 @@ def create_data( $ sqlsynthgen create-data Args: - orm_file (str): Name of Python ORM file. + orm_file (str): Name of YAML ORM file. Must be in the current working directory. ssg_file (str): Name of generators file. Must be in the current working directory. @@ -89,11 +99,9 @@ def create_data( """ conf_logger(verbose) logger.debug("Creating data.") - orm_module = import_file(orm_file) + orm_metadata = load_metadata(orm_file, config) ssg_module = import_file(ssg_file) config = read_config_file(config_file) if config_file is not None else {} - tables_config = config.get("tables", {}) - orm_metadata = get_orm_metadata(orm_module, tables_config) table_generator_dict = ssg_module.table_generator_dict story_generator_list = ssg_module.story_generator_list row_counts = create_db_data( @@ -143,7 +151,7 @@ def create_tables( config_file: Optional[str] = Option(None), verbose: bool = Option(False, "--verbose", "-v"), ) -> None: - """Create schema from a SQLAlchemy ORM file. + """Create schema from the ORM YAML file. 
 
     This CLI command creates the destination schema using object relational
     model declared as Python tables.
@@ -160,9 +168,7 @@
     conf_logger(verbose)
     logger.debug("Creating tables.")
     config = read_config_file(config_file) if config_file is not None else {}
-    tables_config = config.get("tables", {})
-    orm_module = import_file(orm_file)
-    orm_metadata = get_orm_metadata(orm_module, tables_config)
+    orm_metadata = load_metadata(orm_file, config)
     create_db_tables(orm_metadata)
     logger.debug("Tables created.")
 
@@ -203,10 +209,10 @@
     # Check that src_dsn is set, even though we don't need it here.
     _require_src_db_dsn(settings)
 
-    orm_module: ModuleType = import_file(orm_file)
     generator_config = read_config_file(config_file) if config_file is not None else {}
+    orm_metadata = load_metadata(orm_file, generator_config)
     result: str = make_table_generators(
-        orm_module, generator_config, stats_file, overwrite_files=force
+        orm_metadata, generator_config, stats_file, overwrite_files=force
     )
 
     ssg_file_path.write_text(result, encoding="utf-8")
@@ -265,8 +271,8 @@
 
     Args:
         config_file (str): Path to configuration file.
-        orm_file (str): Path to write the Python ORM file.
-        force (bool): Overwrite ORM file, if exists. Default to False.
+        orm_file (str): Path to write the YAML ORM file.
+        force (bool): Overwrite YAML file, if exists. Default to False.
         verbose (bool): Be verbose. Default to False.
     """
     conf_logger(verbose)
@@ -317,9 +323,9 @@
     if yes:
         logger.debug("Truncating non-vocabulary tables.")
         config = read_config_file(config_file) if config_file is not None else {}
-        orm_module = import_file(orm_file)
+        metadata = load_metadata(orm_file, config)
         ssg_module = import_file(ssg_file)
-        remove_db_data(orm_module, ssg_module, config)
+        remove_db_data(metadata, ssg_module, config)
         logger.debug("Non-vocabulary tables truncated.")
     else:
         logger.info("Would truncate non-vocabulary tables if called with --yes.")
@@ -338,9 +344,9 @@
     if yes:
         logger.debug("Truncating vocabulary tables.")
         config = read_config_file(config_file) if config_file is not None else {}
-        orm_module = import_file(orm_file)
+        metadata = load_metadata(orm_file, config)
         ssg_module = import_file(ssg_file)
-        remove_db_vocab(orm_module, ssg_module, config)
+        remove_db_vocab(metadata, ssg_module)
        logger.debug("Vocabulary tables truncated.")
     else:
         logger.info("Would truncate vocabulary tables if called with --yes.")
@@ -361,8 +367,8 @@
     if yes:
         logger.debug("Dropping tables.")
         config = read_config_file(config_file) if config_file is not None else {}
-        orm_module = import_file(orm_file)
-        remove_db_tables(orm_module, config)
+        metadata = load_metadata(orm_file, config)
+        remove_db_tables(metadata)
         logger.debug("Tables dropped.")
     else:
         logger.info("Would remove tables if called with --yes.")
diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py
index d031890f..d5df25ea 100644
--- a/sqlsynthgen/make.py
+++ b/sqlsynthgen/make.py
@@ -6,13 +6,14 @@
 from pathlib import Path
 from types import ModuleType
 from typing import Any, Final, Mapping, Optional, Sequence, Tuple
+import yaml
 
 import pandas as pd
 import snsql
 from black import FileMode, format_str
 from jinja2 import Environment, FileSystemLoader, Template
 from mimesis.providers.base import BaseProvider
-from sqlacodegen.generators import DeclarativeGenerator
+import sqlalchemy
 from sqlalchemy import Engine, MetaData, UniqueConstraint, text
 from sqlalchemy.dialects import postgresql
 from 
@@ -24,11 +25,12 @@
 from sqlsynthgen.utils import (
     create_db_engine,
     download_table,
-    get_orm_metadata,
     get_sync_engine,
     logger,
 )
+from .serialize_metadata import metadata_to_dict
+
 
 PROVIDER_IMPORTS: Final[list[str]] = []
 for entry_name, entry in inspect.getmembers(providers, inspect.isclass):
     if issubclass(entry, BaseProvider) and entry.__module__ == "sqlsynthgen.providers":
@@ -154,7 +156,7 @@ def _get_row_generator(
 
 
 def _get_default_generator(
-    tables_module: ModuleType, column: Column
+    metadata: MetaData, column: Column
 ) -> RowGeneratorInfo:
     """Get default generator information, for the given column."""
     # If it's a primary key column, we presume that primary keys are populated
@@ -316,7 +318,7 @@ def _constraint_sort_key(constraint: UniqueConstraint) -> str:
 
 
 def _get_generator_for_table(
-    tables_module: ModuleType, table_config: Mapping[str, Any], table: Table
+    metadata: MetaData, table_config: Mapping[str, Any], table: Table
 ) -> TableGeneratorInfo:
     """Get generator information for the given table."""
     unique_constraints = sorted(
@@ -340,7 +342,7 @@
     for column in table.columns:
         if column.name not in columns_covered:
             # No generator for this column in the user config.
-            table_data.row_gens.append(_get_default_generator(tables_module, column))
+            table_data.row_gens.append(_get_default_generator(metadata, column))
 
     _enforce_unique_constraints(table_data)
     return table_data
@@ -366,7 +368,7 @@ def _get_story_generators(config: Mapping) -> list[StoryGeneratorInfo]:
 
 
 def make_table_generators(  # pylint: disable=too-many-locals
-    tables_module: ModuleType,
+    metadata: MetaData,
     config: Mapping,
     src_stats_filename: Optional[str],
     overwrite_files: bool = False,
@@ -391,7 +393,6 @@
     assert src_dsn != "", "Missing SRC_DSN setting."
 
     tables_config = config.get("tables", {})
-    metadata = get_orm_metadata(tables_module, tables_config)
     engine = get_sync_engine(create_db_engine(src_dsn, schema_name=settings.src_schema))
 
     tables: list[TableGeneratorInfo] = []
@@ -406,7 +407,7 @@
                 )
             )
         else:
-            tables.append(_get_generator_for_table(tables_module, table_config, table))
 
     story_generators = _get_story_generators(config)
 
@@ -486,35 +487,27 @@ def reflect_if(table_name: str, _: Any) -> bool:
         ignore = table_config.get("ignore", False)
         return not ignore
 
+    schemae = sqlalchemy.inspect(engine).get_schema_names()
     metadata = MetaData()
     metadata.reflect(
         engine,
         only=reflect_if,
     )
-
-    for table_name in metadata.tables.keys():
-        table_config = tables_config.get(table_name, {})
-        ignore = table_config.get("ignore", False)
-        if ignore:
-            logger.warning(
-                "Table %s is supposed to be ignored but there is a foreign key "
-                "reference to it. "
-                "You may need to create this table manually at the dst schema before "
-                "running create-tables.",
-                table_name,
-            )
-
-    generator = DeclarativeGenerator(metadata, engine, options=())
-    code = str(generator.generate())
-
-    # sqlacodegen falls back on Tables() for tables without PKs,
-    # but we don't explicitly support Tables and behaviour is unpredictable.
-    if " = Table(" in code:
-        logger.warning(
-            "Table without PK detected.
sqlsynthgen may not be able to continue.", - ) - - return format_str(code, mode=FileMode()) + meta_dict = metadata_to_dict(metadata, db_dsn, engine.dialect) + +# for table_name in metadata.tables.keys(): +# table_config = tables_config.get(table_name, {}) +# ignore = table_config.get("ignore", False) +# if ignore: +# logger.warning( +# "Table %s is supposed to be ignored but there is a foreign key " +# "reference to it. " +# "You may need to create this table manually at the dst schema before " +# "running create-tables.", +# table_name, +# ) + + return yaml.dump(meta_dict) async def make_src_stats( diff --git a/sqlsynthgen/remove.py b/sqlsynthgen/remove.py index 1aef8ca1..3994c263 100644 --- a/sqlsynthgen/remove.py +++ b/sqlsynthgen/remove.py @@ -2,25 +2,23 @@ from types import ModuleType from typing import Any, Mapping -from sqlalchemy import delete +from sqlalchemy import delete, MetaData from sqlsynthgen.settings import get_settings from sqlsynthgen.utils import ( create_db_engine, - get_orm_metadata, get_sync_engine, logger, ) def remove_db_data( - orm_module: ModuleType, ssg_module: ModuleType, config: Mapping[str, Any] + metadata: MetaData, ssg_module: ModuleType, config: Mapping[str, Any] ) -> None: """Truncate the synthetic data tables but not the vocabularies.""" settings = get_settings() assert settings.dst_dsn, "Missing destination database settings" tables_config = config.get("tables", {}) - metadata = get_orm_metadata(orm_module, tables_config) dst_engine = get_sync_engine( create_db_engine(settings.dst_dsn, schema_name=settings.dst_schema) ) @@ -34,14 +32,10 @@ def remove_db_data( dst_conn.commit() -def remove_db_vocab( - orm_module: ModuleType, ssg_module: ModuleType, config: Mapping[str, Any] -) -> None: +def remove_db_vocab(metadata: MetaData, ssg_module: ModuleType) -> None: """Truncate the vocabulary tables.""" settings = get_settings() assert settings.dst_dsn, "Missing destination database settings" - tables_config = config.get("tables", {}) - metadata = get_orm_metadata(orm_module, tables_config) dst_engine = get_sync_engine( create_db_engine(settings.dst_dsn, schema_name=settings.dst_schema) ) @@ -55,12 +49,10 @@ def remove_db_vocab( dst_conn.commit() -def remove_db_tables(orm_module: ModuleType, config: Mapping[str, Any]) -> None: +def remove_db_tables(metadata: MetaData) -> None: """Drop the tables in the destination schema.""" settings = get_settings() assert settings.dst_dsn, "Missing destination database settings" - tables_config = config.get("tables", {}) - metadata = get_orm_metadata(orm_module, tables_config) dst_engine = get_sync_engine( create_db_engine(settings.dst_dsn, schema_name=settings.dst_schema) ) diff --git a/sqlsynthgen/serialize_metadata.py b/sqlsynthgen/serialize_metadata.py new file mode 100644 index 00000000..2b5aceb0 --- /dev/null +++ b/sqlsynthgen/serialize_metadata.py @@ -0,0 +1,189 @@ +from enum import StrEnum +import parsy +import re +from sqlalchemy import MetaData, Table, Column, Dialect +from sqlalchemy.dialects import oracle, postgresql +from sqlalchemy.sql import sqltypes +from sqlalchemy.sql.compiler import StrSQLTypeCompiler + +type table_component_t = dict[str, any] +type table_t = dict[str, table_component_t] + +def simple(type_): + """ + Parses a simple sqltypes type. + For example, simple(sqltypes.UUID) takes the string "UUID" and outputs + a UUID class, or fails with any other string. + """ + return parsy.string(type_.__name__).result(type_) + +def integer(): + """ + Parses an integer, outputting that integer. 
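+    For example, integer().parse("-12") returns -12.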
+ """ + return parsy.regex(r"-?[0-9]+").map(int) + +def integer_arguments(): + """ + Parses a list of integers. + The integers are surrounded by brackets and separated by + a comma and space. + """ + return parsy.string("(") >> ( + integer().sep_by(parsy.string(", ")) + ) << parsy.string(")") + +def numeric_type(type_): + """ + Parses TYPE_NAME, TYPE_NAME(2) or TYPE_NAME(2,3) + passing any arguments to the TYPE_NAME constructor. + """ + return parsy.string(type_.__name__ + ) >> integer_arguments().optional([]).combine(type_) + +def string_type(type_): + @parsy.generate(type_.__name__) + def st_parser(): + """ + Parses TYPE_NAME, TYPE_NAME(32), TYPE_NAME COLLATE "fr" + or TYPE_NAME(32) COLLATE "fr" + """ + yield parsy.string(type_.__name__) + length: int | None = yield ( + parsy.string("(") >> integer() << parsy.string(")") + ).optional() + collation: str | None = yield ( + parsy.string(' COLLATE "') >> parsy.regex(r'[^"]*') << parsy.string('"') + ).optional() + return type_(length=length, collation=collation) + return st_parser + +def time_type(type_, pg_type): + @parsy.generate(type_.__name__) + def pgt_parser(): + """ + Parses TYPE_NAME, TYPE_NAME(32), TYPE_NAME WITH TIMEZONE + or TYPE_NAME(32) WITH TIMEZONE + """ + yield parsy.string(type_.__name__) + precision: int | None = yield ( + parsy.string("(") >> integer() << parsy.string(")") + ).optional() + timezone: str | None = yield ( + parsy.string(" WITH TIME ZONE").result(True) + ).optional(False) + if precision is None and not timezone: + # normal sql type + return type_ + return pg_type(precision=precision, timezone=timezone) + return pgt_parser + +SIMPLE_TYPE_PARSER = parsy.alt( + parsy.string("DOUBLE PRECISION").result(sqltypes.DOUBLE_PRECISION), # must be before DOUBLE + simple(sqltypes.FLOAT), + simple(sqltypes.DOUBLE), + simple(sqltypes.INTEGER), + simple(sqltypes.SMALLINT), + simple(sqltypes.BIGINT), + simple(sqltypes.DATETIME), + simple(sqltypes.DATE), + simple(sqltypes.CLOB), + simple(oracle.NCLOB), + simple(sqltypes.UUID), + simple(sqltypes.BLOB), + simple(sqltypes.BOOLEAN), + simple(postgresql.TSVECTOR), + simple(postgresql.BYTEA), + numeric_type(sqltypes.NUMERIC), + numeric_type(sqltypes.DECIMAL), + string_type(sqltypes.CHAR), + string_type(sqltypes.NCHAR), + string_type(sqltypes.VARCHAR), + string_type(sqltypes.NVARCHAR), + string_type(sqltypes.TEXT), + time_type(sqltypes.TIMESTAMP, postgresql.types.TIMESTAMP), + time_type(sqltypes.TIME, postgresql.types.TIME), +) + +@parsy.generate +def type_parser(): + base = yield SIMPLE_TYPE_PARSER + dimensions = yield parsy.string("[]").many().map(len) + if dimensions == 0: + return base + return postgresql.ARRAY(base, dimensions=dimensions) + +def column_to_dict(column: Column, dialect: Dialect) -> str: + type_ = column.type + if isinstance(type_, postgresql.DOMAIN): + # Instead of creating a restricted type, we'll just use the base type. + # It might be better to use the actual type if we could find a good way + # to compile it and also parse the compiled string. 
+ type_ = type_.data_type + if isinstance(type_, postgresql.ENUM): + compiled = "TEXT" + else: + compiled = dialect.type_compiler_instance.process(type_) + return { + "type": compiled + } + +def dict_to_column(name, rep: dict) -> Column: + type_sql = rep["type"] + try: + type_ = type_parser.parse(type_sql) + except parsy.ParseError as e: + print(f"Failed to parse {type_sql}") + raise e + return Column( + name=name, + type_=type_, + ) + +def table_to_dict(table: Table, dialect: Dialect) -> table_t: + """ + Converts a SQL Alchemy Table object into a + Python object ready for conversion to YAML. + """ + return { + "columns": { + str(column.key): column_to_dict(column, dialect) + for (k, column) in table.columns.items() + }, + "schema": table.schema + } + +def dict_to_table(name: str, meta: MetaData, table_dict: table_t) -> Table: + return Table( + name, + meta, + *[ dict_to_column(name, col) + for (name, col) in table_dict.get("columns", {}).items() + ], + schema=table_dict.get("schema") + ) + +def metadata_to_dict(meta: MetaData, dsn: str, dialect: Dialect) -> dict[str, table_t]: + """ + Converts a SQL Alchemy MetaData object into + a Python object ready for conversion to YAML. + """ + return { + "tables": { + str(table.name): table_to_dict(table, dialect) + for (k, table) in meta.tables.items() + }, + "dsn": dsn, + "schema": meta.schema + } + +def dict_to_metadata(obj: dict[str, table_t]) -> MetaData: + """ + Converts a dict to a SQL Alchemy MetaData object. + """ + table_dict = obj.get("tables", {}) + schema = obj.get("schema", "public") + meta = MetaData(schema=schema) + for (k, td) in table_dict.items(): + dict_to_table(k, meta, td) + return meta From 5543f44bb566748b1992b7b9d3411b2aa56dd711 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 28 Nov 2024 13:07:54 +0000 Subject: [PATCH 02/85] primary, nullable and unique added to orm.yaml --- sqlsynthgen/serialize_metadata.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/sqlsynthgen/serialize_metadata.py b/sqlsynthgen/serialize_metadata.py index 2b5aceb0..e185e5ac 100644 --- a/sqlsynthgen/serialize_metadata.py +++ b/sqlsynthgen/serialize_metadata.py @@ -125,7 +125,10 @@ def column_to_dict(column: Column, dialect: Dialect) -> str: else: compiled = dialect.type_compiler_instance.process(type_) return { - "type": compiled + "type": compiled, + "primary": column.primary_key, + "nullable": column.nullable, + "unique": column.unique } def dict_to_column(name, rep: dict) -> Column: @@ -138,6 +141,9 @@ def dict_to_column(name, rep: dict) -> Column: return Column( name=name, type_=type_, + primary_key=rep.get("primary", False), + nullable=rep.get("nullable", None), + unique=rep.get("unique", None), ) def table_to_dict(table: Table, dialect: Dialect) -> table_t: From 5978809a2a7e9e2edce4fb0fd9f11ee2853c5164 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 29 Nov 2024 15:04:21 +0000 Subject: [PATCH 03/85] Foreign keys in orm.yaml --- sqlsynthgen/serialize_metadata.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/sqlsynthgen/serialize_metadata.py b/sqlsynthgen/serialize_metadata.py index e185e5ac..5c061383 100644 --- a/sqlsynthgen/serialize_metadata.py +++ b/sqlsynthgen/serialize_metadata.py @@ -1,10 +1,7 @@ -from enum import StrEnum import parsy -import re -from sqlalchemy import MetaData, Table, Column, Dialect +from sqlalchemy import MetaData, Table, Column, Dialect, ForeignKey from sqlalchemy.dialects import oracle, postgresql from sqlalchemy.sql import sqltypes -from 
sqlalchemy.sql.compiler import StrSQLTypeCompiler
 
 type table_component_t = dict[str, any]
 type table_t = dict[str, table_component_t]
@@ -124,12 +121,16 @@ def column_to_dict(column: Column, dialect: Dialect) -> str:
         compiled = "TEXT"
     else:
         compiled = dialect.type_compiler_instance.process(type_)
-    return {
+    result = {
         "type": compiled,
         "primary": column.primary_key,
         "nullable": column.nullable,
-        "unique": column.unique
+        "unique": column.unique,
     }
+    foreign_keys = [str(fk.target_fullname) for fk in column.foreign_keys]
+    if foreign_keys:
+        result["foreign_keys"] = foreign_keys
+    return result
 
 def dict_to_column(name, rep: dict) -> Column:
     type_sql = rep["type"]
@@ -138,7 +139,15 @@ def dict_to_column(name, rep: dict) -> Column:
     except parsy.ParseError as e:
         print(f"Failed to parse {type_sql}")
         raise e
+    if "foreign_keys" in rep:
+        args = [
+            ForeignKey(fk, ondelete='CASCADE')
+            for fk in rep["foreign_keys"]
+        ]
+    else:
+        args = []
     return Column(
+        *args,
         name=name,
         type_=type_,
         primary_key=rep.get("primary", False),

From 4c7d2e375d11681da358fcb2fbbbeed9cddae621 Mon Sep 17 00:00:00 2001
From: Tim Band
Date: Fri, 29 Nov 2024 17:41:27 +0000
Subject: [PATCH 04/85] ssg.py code generation fixed

---
 sqlsynthgen/main.py             | 18 ++++++++----
 sqlsynthgen/make.py             | 49 +++++++--------------------------
 sqlsynthgen/templates/ssg.py.j2 |  6 ++--
 tests/test_make.py              |  7 +++--
 4 files changed, 30 insertions(+), 50 deletions(-)

diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py
index d9cfd6c2..0446391b 100644
--- a/sqlsynthgen/main.py
+++ b/sqlsynthgen/main.py
@@ -54,9 +54,11 @@ def _require_src_db_dsn(settings: Settings) -> str:
     return src_dsn
 
 
-def load_metadata(orm_file_name, config):
-    tables_config = config.get("tables", {})
-    # TODO: drop tables flagged as "ignore" in tables_config
+def load_metadata(orm_file_name, config=None):
+    if config is not None and "tables" in config:
+        tables_config = config["tables"]
+        # TODO: drop tables flagged as "ignore" in tables_config
+        #...
with open(orm_file_name) as orm_fh: meta_dict = yaml.load(orm_fh, yaml.Loader) return dict_to_metadata(meta_dict) @@ -99,9 +101,8 @@ def create_data( """ conf_logger(verbose) logger.debug("Creating data.") - orm_metadata = load_metadata(orm_file, config) + orm_metadata = load_metadata(orm_file, config_file) ssg_module = import_file(ssg_file) - config = read_config_file(config_file) if config_file is not None else {} table_generator_dict = ssg_module.table_generator_dict story_generator_list = ssg_module.story_generator_list row_counts = create_db_data( @@ -212,7 +213,12 @@ def make_generators( generator_config = read_config_file(config_file) if config_file is not None else {} orm_metadata = load_metadata(orm_file, generator_config) result: str = make_table_generators( - orm_metadata, generator_config, stats_file, overwrite_files=force + orm_metadata, + generator_config, + orm_file, + config_file, + stats_file, + overwrite_files=force ) ssg_file_path.write_text(result, encoding="utf-8") diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index d5df25ea..45934aba 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -87,23 +87,6 @@ class StoryGeneratorInfo: num_stories_per_pass: int -def _orm_class_from_table_name( - tables_module: ModuleType, full_name: str -) -> Optional[Tuple[str, str]]: - """Return the ORM class corresponding to a table name.""" - # If the class in tables_module is an SQLAlchemy ORM class - for mapper in tables_module.Base.registry.mappers: - cls = mapper.class_ - if cls.__table__.fullname == full_name: - return cls.__name__, cls.__name__ + ".__table__" - - # If the class in tables_module is a SQLAlchemy Core Table - guess = "t_" + full_name - if guess in dir(tables_module): - return guess, guess - return None - - def _get_function_call( function_name: str, positional_arguments: Optional[Sequence[Any]] = None, @@ -156,7 +139,7 @@ def _get_row_generator( def _get_default_generator( - metadata: MetaData, column: Column + column: Column ) -> RowGeneratorInfo: """Get default generator information, for the given column.""" # If it's a primary key column, we presume that primary keys are populated @@ -177,17 +160,12 @@ def _get_default_generator( target_name_parts = fkey.target_fullname.split(".") target_table_name = ".".join(target_name_parts[:-1]) target_column_name = target_name_parts[-1] - class_and_name = _orm_class_from_table_name(tables_module, target_table_name) - if not class_and_name: - raise ValueError(f"Could not find the ORM class for {target_table_name}.") - - target_orm_class, _ = class_and_name variable_names = [column.name] generator_function = "generic.column_value_provider.column_value" generator_arguments = [ "dst_db_conn", - f"{tables_module.__name__}.{target_orm_class}", + f"metadata.tables['{target_table_name}']", f'"{target_column_name}"', ] @@ -331,7 +309,7 @@ def _get_generator_for_table( ) table_data: TableGeneratorInfo = TableGeneratorInfo( table_name=table.name, - class_name=table.name + "Generator", + class_name=table.name.title() + "Generator", rows_per_pass=table_config.get("num_rows_per_pass", 1), unique_constraints=unique_constraints, ) @@ -342,7 +320,7 @@ def _get_generator_for_table( for column in table.columns: if column.name not in columns_covered: # No generator for this column in the user config. 
- table_data.row_gens.append(_get_default_generator(metadata, column)) + table_data.row_gens.append(_get_default_generator(column)) _enforce_unique_constraints(table_data) return table_data @@ -370,13 +348,14 @@ def _get_story_generators(config: Mapping) -> list[StoryGeneratorInfo]: def make_table_generators( # pylint: disable=too-many-locals metadata: MetaData, config: Mapping, + orm_filename: str, + config_filename: str, src_stats_filename: Optional[str], overwrite_files: bool = False, ) -> str: """Create sqlsynthgen generator classes from a sqlacodegen-generated file. Args: - tables_module: A sqlacodegen-generated module. config: Configuration to control the generator creation. src_stats_filename: A filename for where to read src stats from. Optional, if `None` this feature will be skipped @@ -403,7 +382,7 @@ def make_table_generators( # pylint: disable=too-many-locals if table_config.get("vocabulary_table") is True: vocabulary_tables.append( _get_generator_for_vocabulary_table( - tables_module, table, engine, overwrite_files=overwrite_files + table, engine, overwrite_files=overwrite_files ) ) else: @@ -415,7 +394,8 @@ def make_table_generators( # pylint: disable=too-many-locals return generate_ssg_content( { "provider_imports": PROVIDER_IMPORTS, - "tables_module": tables_module, + "orm_file_name": orm_filename, + "config_file_name": repr(config_filename), "row_generator_module_name": row_generator_module_name, "story_generator_module_name": story_generator_module_name, "src_stats_filename": src_stats_filename, @@ -441,20 +421,11 @@ def generate_ssg_content(template_context: Mapping[str, Any]) -> str: def _get_generator_for_vocabulary_table( - tables_module: ModuleType, table: Table, engine: Engine, table_file_name: Optional[str] = None, overwrite_files: bool = False, ) -> VocabularyTableGeneratorInfo: - class_and_name: Optional[Tuple[str, str]] = _orm_class_from_table_name( - tables_module, table.fullname - ) - if not class_and_name: - raise RuntimeError(f"Couldn't find {table.fullname} in {tables_module}") - - class_name, table_name = class_and_name - yaml_file_name: str = table_file_name or table.fullname + ".yaml" if Path(yaml_file_name).exists() and not overwrite_files: logger.error("%s already exists. 
Exiting...", yaml_file_name) @@ -468,7 +439,7 @@ def _get_generator_for_vocabulary_table( class_name=class_name, dictionary_entry=table.name, variable_name=f"{class_name.lower()}_vocab", - table_name=table_name, + table_name=table.name, ) diff --git a/sqlsynthgen/templates/ssg.py.j2 b/sqlsynthgen/templates/ssg.py.j2 index a2456414..4b43625b 100644 --- a/sqlsynthgen/templates/ssg.py.j2 +++ b/sqlsynthgen/templates/ssg.py.j2 @@ -3,6 +3,7 @@ from mimesis import Generic from mimesis.locales import Locale from sqlsynthgen.base import FileUploader, TableGenerator from sqlsynthgen.unique_generator import UniqueGenerator +from sqlsynthgen.main import load_metadata generic = Generic(locale=Locale.EN_GB) @@ -11,7 +12,8 @@ from sqlsynthgen.providers import {{ provider_import }} generic.add_provider({{ provider_import }}) {% endfor %} -import {{ tables_module.__name__ }} +metadata = load_metadata("{{ orm_file_name }}", {{ config_file_name }}) + {% if row_generator_module_name is not none %} import {{ row_generator_module_name }} {% endif %} @@ -26,7 +28,7 @@ with open("{{ src_stats_filename }}", "r", encoding="utf-8") as f: {% endif %} {% for table_data in vocabulary_tables %} -{{ table_data.variable_name }} = FileUploader({{ tables_module.__name__ }}.{{ table_data.table_name }}) +{{ table_data.variable_name }} = FileUploader(metadata.tables["{{ table_data.table_name }}"]) {% endfor %} {% for table_data in tables %} diff --git a/tests/test_make.py b/tests/test_make.py index 90938a81..c57a78fb 100644 --- a/tests/test_make.py +++ b/tests/test_make.py @@ -43,6 +43,7 @@ def test_make_table_generators( self, mock_download: MagicMock, mock_create: MagicMock, + #... and orm_file_name and config_file mock_get_settings: MagicMock, mock_path: MagicMock, ) -> None: @@ -58,7 +59,7 @@ def test_make_table_generators( config = yaml.safe_load(f) stats_path = "example_stats.yaml" - actual = make_table_generators(example_orm, config, stats_path) + actual = make_table_generators(example_orm, config, stats_path) #... and orm_file_name and config_file_name # 5 because there are 5 vocabulary tables in the example orm. self.assertEqual(mock_path.call_count, 5) self.assertEqual(mock_download.call_count, 5) @@ -85,7 +86,7 @@ def test_make_generators_do_not_overwrite( stats_path = "example_stats.yaml" try: - make_table_generators(example_orm, configuration, stats_path) + make_table_generators(example_orm, configuration, stats_path) #... and orm_file_name and config_file_name except SystemExit: pass @@ -117,7 +118,7 @@ def test_make_generators_force_overwrite( stats_path: str = "example_stats.yaml" actual: str = make_table_generators( - example_orm, config, stats_path, overwrite_files=True + example_orm, config, stats_path, overwrite_files=True #... and orm_file_name and config_file_name ) mock_create.assert_called_once() From a137cae3be34b63d7edd6c97bef3875ab3889edc Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 3 Dec 2024 12:27:59 +0000 Subject: [PATCH 05/85] Creating data now works for Paglia! 
...with a small config file --- .gitignore | 1 + sqlsynthgen/make.py | 130 ++++++++++++++++++++++---------- sqlsynthgen/templates/ssg.py.j2 | 2 - 3 files changed, 90 insertions(+), 43 deletions(-) diff --git a/.gitignore b/.gitignore index 71ef09b1..2f04884e 100644 --- a/.gitignore +++ b/.gitignore @@ -146,3 +146,4 @@ docs/temp/* ssg.py orm.yaml src-stats.yaml +config.yaml diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 45934aba..1ef73251 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -4,8 +4,7 @@ import sys from dataclasses import dataclass, field from pathlib import Path -from types import ModuleType -from typing import Any, Final, Mapping, Optional, Sequence, Tuple +from typing import Any, Callable, Final, Mapping, Optional, Sequence, Tuple import yaml import pandas as pd @@ -18,7 +17,7 @@ from sqlalchemy.dialects import postgresql from sqlalchemy.ext.asyncio import AsyncEngine from sqlalchemy.schema import Column, Table -from sqlalchemy.sql import sqltypes +from sqlalchemy.sql import sqltypes, type_api from sqlsynthgen import providers from sqlsynthgen.settings import get_settings @@ -168,25 +167,101 @@ def _get_default_generator( f"metadata.tables['{target_table_name}']", f'"{target_column_name}"', ] + return RowGeneratorInfo( + primary_key=column.primary_key, + variable_names=variable_names, + function_call=_get_function_call( + function_name=generator_function, positional_arguments=generator_arguments + ), + ) # Otherwise generate values based on just the datatype of the column. - else: - ( - variable_names, - generator_function, - generator_arguments, - ) = _get_provider_for_column(column) + ( + variable_names, + generator_function, + generator_arguments, + ) = _get_provider_for_column(column) return RowGeneratorInfo( primary_key=column.primary_key, variable_names=variable_names, function_call=_get_function_call( - function_name=generator_function, positional_arguments=generator_arguments + function_name=generator_function, keyword_arguments=generator_arguments ), ) -def _get_provider_for_column(column: Column) -> Tuple[list[str], str, list[str]]: +def _numeric_generator(column_type: type_api.TypeEngine) -> tuple[str, dict[str, str]]: + """ + Returns the name of a generator and maybe arguments + that limit its range to the permitted scale. + """ + if column_type.scale is None: + return ("generic.numeric.float_number", {}) + return ("generic.numeric.float_number", { + "start": 0, + "end": 10 ** column_type.scale - 1, + }) + + +def _string_generator(column_type: type_api.TypeEngine) -> tuple[str, dict[str, str]]: + """ + Returns the name of a string generator and maybe arguments + that limit its length. + """ + column_size: Optional[int] = getattr(column_type, "length", None) + if column_size is None: + return ("generic.text.color", {}) + return ("generic.person.password", { "length": str(column_size) }) + + +_COLUMN_TYPE_TO_GENERATOR = { + sqltypes.Integer: "generic.numeric.integer_number", + sqltypes.Boolean: "generic.development.boolean", + sqltypes.Date: "generic.datetime.date", + sqltypes.DateTime: "generic.datetime.datetime", + sqltypes.Numeric: _numeric_generator, + sqltypes.LargeBinary: "generic.bytes_provider.bytes", + sqltypes.Uuid: "generic.cryptographic.uuid", + postgresql.UUID: "generic.cryptographic.uuid", + sqltypes.String: _string_generator, +} + +def _get_generator_for_column(column_t: type) -> str | Callable[ + [type_api.TypeEngine], tuple[str, dict[str, str]]]: + """ + Gets a generator from a column type. 
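+    For example, passing sqltypes.Boolean returns "generic.development.boolean".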
+ + Returns either a string representing the callable, or a callable that, + given the column.type will return a tuple (string representing generator + callable, dict of keyword arguments to pass to the callable). + """ + if column_t in _COLUMN_TYPE_TO_GENERATOR: + return _COLUMN_TYPE_TO_GENERATOR.get(column_t, None) + + # Search exhaustively for a superclass to the columns actual type + for key, value in _COLUMN_TYPE_TO_GENERATOR.items(): + if issubclass(column_t, key): + return value + + return None + + +def _get_generator_and_arguments(column_type: type_api.TypeEngine) -> tuple[str, dict[str, str]]: + """ + Gets the generator and its arguments from the column type, returning + a tuple of a string representing the generator callable and a dict of + keyword arguments to supply to it. + """ + generator_function = _get_generator_for_column(type(column_type)) + + generator_arguments: dict[str, str] = {} + if callable(generator_function): + (generator_function, generator_arguments) = generator_function(column_type) + return generator_function,generator_arguments + + +def _get_provider_for_column(column: Column) -> Tuple[list[str], str, dict[str, str]]: """ Get a default Mimesis provider and its arguments for a SQL column type. @@ -198,32 +273,8 @@ def _get_provider_for_column(column: Column) -> Tuple[list[str], str, list[str]] generator function and any generator arguments. """ variable_names: list[str] = [column.name] - generator_arguments: list[str] = [] - - column_type = type(column.type) - column_size: Optional[int] = getattr(column.type, "length", None) - - mapping = { - (sqltypes.Integer, False): "generic.numeric.integer_number", - (sqltypes.Boolean, False): "generic.development.boolean", - (sqltypes.Date, False): "generic.datetime.date", - (sqltypes.DateTime, False): "generic.datetime.datetime", - (sqltypes.Numeric, False): "generic.numeric.float_number", - (sqltypes.LargeBinary, False): "generic.bytes_provider.bytes", - (sqltypes.Uuid, False): "generic.cryptographic.uuid", - (postgresql.UUID, False): "generic.cryptographic.uuid", - (sqltypes.String, False): "generic.text.color", - (sqltypes.String, True): "generic.person.password", - } - - generator_function = mapping.get((column_type, column_size is not None), None) - # Try if we know how to generate for a superclass of this type. - if not generator_function: - for key, value in mapping.items(): - if issubclass(column_type, key[0]) and key[1] == (column_size is not None): - generator_function = value - break + generator_function, generator_arguments = _get_generator_and_arguments(column.type) # If we still don't have a generator, use null and warn. if not generator_function: @@ -232,11 +283,9 @@ def _get_provider_for_column(column: Column) -> Tuple[list[str], str, list[str]] "Unsupported SQLAlchemy type %s for column %s. 
" "Setting this column to NULL always, " "you may want to configure a row generator for it instead.", - column_type, + column.type, column.name, ) - elif column_size: - generator_arguments.append(str(column_size)) return variable_names, generator_function, generator_arguments @@ -436,9 +485,8 @@ def _get_generator_for_vocabulary_table( logger.debug("Done downloading %s", table.name) return VocabularyTableGeneratorInfo( - class_name=class_name, dictionary_entry=table.name, - variable_name=f"{class_name.lower()}_vocab", + variable_name=f"{table.name.lower()}_vocab", table_name=table.name, ) diff --git a/sqlsynthgen/templates/ssg.py.j2 b/sqlsynthgen/templates/ssg.py.j2 index 4b43625b..3733f1da 100644 --- a/sqlsynthgen/templates/ssg.py.j2 +++ b/sqlsynthgen/templates/ssg.py.j2 @@ -53,11 +53,9 @@ class {{ table_data.class_name }}(TableGenerator): def __call__(self, dst_db_conn): result = {} {% for row_gen in table_data.row_gens %} - {% if not row_gen.primary_key %} {% for vn in row_gen.variable_names %} result["{{vn}}"]{%- if not loop.last %}, {% endif %} {% endfor %} = {{ row_gen.function_call.function_name }}({{ row_gen.function_call.argument_values| join(", ") }}) - {% endif %} {% endfor %} return result {% endfor %} From 709eb07878fa5e642e6b2beffed6ba3d6692b33f Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 5 Dec 2024 11:40:35 +0000 Subject: [PATCH 06/85] Primary key increment, unique constraint support --- sqlsynthgen/make.py | 51 +++++++++++++++++++++++++------ sqlsynthgen/serialize_metadata.py | 27 ++++++++++++++-- sqlsynthgen/templates/ssg.py.j2 | 3 +- 3 files changed, 68 insertions(+), 13 deletions(-) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 1ef73251..16ae00a5 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -191,11 +191,12 @@ def _get_default_generator( ) -def _numeric_generator(column_type: type_api.TypeEngine) -> tuple[str, dict[str, str]]: +def _numeric_generator(column: Column) -> tuple[str, dict[str, str]]: """ Returns the name of a generator and maybe arguments that limit its range to the permitted scale. """ + column_type = column.type if column_type.scale is None: return ("generic.numeric.float_number", {}) return ("generic.numeric.float_number", { @@ -204,22 +205,32 @@ def _numeric_generator(column_type: type_api.TypeEngine) -> tuple[str, dict[str, }) -def _string_generator(column_type: type_api.TypeEngine) -> tuple[str, dict[str, str]]: +def _string_generator(column: Column) -> tuple[str, dict[str, str]]: """ Returns the name of a string generator and maybe arguments that limit its length. """ - column_size: Optional[int] = getattr(column_type, "length", None) + column_size: Optional[int] = getattr(column.type, "length", None) if column_size is None: return ("generic.text.color", {}) return ("generic.person.password", { "length": str(column_size) }) +def _integer_generator(column: Column) -> tuple[str, dict[str, str]]: + """ + Returns the name of an integer generator. 
+ """ + if not column.primary_key: + return ("generic.numeric.integer_number", {}) + return ("numeric.increment", { + "accumulator": f'"{column.table.fullname}.{column.name}"' + }) _COLUMN_TYPE_TO_GENERATOR = { sqltypes.Integer: "generic.numeric.integer_number", sqltypes.Boolean: "generic.development.boolean", sqltypes.Date: "generic.datetime.date", sqltypes.DateTime: "generic.datetime.datetime", + sqltypes.Integer: _integer_generator, # must be before Numeric sqltypes.Numeric: _numeric_generator, sqltypes.LargeBinary: "generic.bytes_provider.bytes", sqltypes.Uuid: "generic.cryptographic.uuid", @@ -237,7 +248,7 @@ def _get_generator_for_column(column_t: type) -> str | Callable[ callable, dict of keyword arguments to pass to the callable). """ if column_t in _COLUMN_TYPE_TO_GENERATOR: - return _COLUMN_TYPE_TO_GENERATOR.get(column_t, None) + return _COLUMN_TYPE_TO_GENERATOR[column_t] # Search exhaustively for a superclass to the columns actual type for key, value in _COLUMN_TYPE_TO_GENERATOR.items(): @@ -247,17 +258,17 @@ def _get_generator_for_column(column_t: type) -> str | Callable[ return None -def _get_generator_and_arguments(column_type: type_api.TypeEngine) -> tuple[str, dict[str, str]]: +def _get_generator_and_arguments(column: Column) -> tuple[str, dict[str, str]]: """ Gets the generator and its arguments from the column type, returning a tuple of a string representing the generator callable and a dict of keyword arguments to supply to it. """ - generator_function = _get_generator_for_column(type(column_type)) + generator_function = _get_generator_for_column(type(column.type)) generator_arguments: dict[str, str] = {} if callable(generator_function): - (generator_function, generator_arguments) = generator_function(column_type) + (generator_function, generator_arguments) = generator_function(column) return generator_function,generator_arguments @@ -274,7 +285,7 @@ def _get_provider_for_column(column: Column) -> Tuple[list[str], str, dict[str, """ variable_names: list[str] = [column.name] - generator_function, generator_arguments = _get_generator_and_arguments(column.type) + generator_function, generator_arguments = _get_generator_and_arguments(column) # If we still don't have a generator, use null and warn. if not generator_function: @@ -344,8 +355,19 @@ def _constraint_sort_key(constraint: UniqueConstraint) -> str: ) +class _PrimaryConstraint: + """ + Describes a Uniqueness constraint for when multiple + columns in a table comprise the primary key. Not a + real constraint, but enough to write ssg.py. 
+ """ + def __init__(self, *columns: Column, name: str): + self.name = name + self.columns = columns + + def _get_generator_for_table( - metadata: MetaData, table_config: Mapping[str, Any], table: Table + table_config: Mapping[str, Any], table: Table ) -> TableGeneratorInfo: """Get generator information for the given table.""" unique_constraints = sorted( @@ -356,6 +378,15 @@ def _get_generator_for_table( ), key=_constraint_sort_key, ) + primary_keys = [ + c for c in table.columns + if c.primary_key + ] + if 1 < len(primary_keys): + unique_constraints.append(_PrimaryConstraint( + *primary_keys, + name=f"{table.name}_primary_key" + )) table_data: TableGeneratorInfo = TableGeneratorInfo( table_name=table.name, class_name=table.name.title() + "Generator", @@ -435,7 +466,7 @@ def make_table_generators( # pylint: disable=too-many-locals ) ) else: - tables.append(_get_generator_for_table(metadata, table_config, table)) + tables.append(_get_generator_for_table(table_config, table)) story_generators = _get_story_generators(config) diff --git a/sqlsynthgen/serialize_metadata.py b/sqlsynthgen/serialize_metadata.py index 5c061383..33e0d580 100644 --- a/sqlsynthgen/serialize_metadata.py +++ b/sqlsynthgen/serialize_metadata.py @@ -1,7 +1,7 @@ import parsy from sqlalchemy import MetaData, Table, Column, Dialect, ForeignKey from sqlalchemy.dialects import oracle, postgresql -from sqlalchemy.sql import sqltypes +from sqlalchemy.sql import sqltypes, schema type table_component_t = dict[str, any] type table_t = dict[str, table_component_t] @@ -155,6 +155,21 @@ def dict_to_column(name, rep: dict) -> Column: unique=rep.get("unique", None), ) +def dict_to_unique(rep: dict) -> schema.UniqueConstraint: + return schema.UniqueConstraint( + *rep.get("columns", []), + name=rep.get("name", None) + ) + +def unique_to_dict(constraint: schema.UniqueConstraint) -> dict: + return { + "name": constraint.name, + "columns": [ + str(col.name) + for col in constraint.columns + ] + } + def table_to_dict(table: Table, dialect: Dialect) -> table_t: """ Converts a SQL Alchemy Table object into a @@ -165,7 +180,12 @@ def table_to_dict(table: Table, dialect: Dialect) -> table_t: str(column.key): column_to_dict(column, dialect) for (k, column) in table.columns.items() }, - "schema": table.schema + "schema": table.schema, + "unique": [ + unique_to_dict(constraint) + for constraint in table.constraints + if isinstance(constraint, schema.UniqueConstraint) + ], } def dict_to_table(name: str, meta: MetaData, table_dict: table_t) -> Table: @@ -175,6 +195,9 @@ def dict_to_table(name: str, meta: MetaData, table_dict: table_t) -> Table: *[ dict_to_column(name, col) for (name, col) in table_dict.get("columns", {}).items() ], + *[ dict_to_unique(constraint) + for constraint in table_dict.get("unique", []) + ], schema=table_dict.get("schema") ) diff --git a/sqlsynthgen/templates/ssg.py.j2 b/sqlsynthgen/templates/ssg.py.j2 index 3733f1da..27d9735c 100644 --- a/sqlsynthgen/templates/ssg.py.j2 +++ b/sqlsynthgen/templates/ssg.py.j2 @@ -1,11 +1,12 @@ """This file was auto-generated by sqlsynthgen but can be edited manually.""" -from mimesis import Generic +from mimesis import Generic, Numeric from mimesis.locales import Locale from sqlsynthgen.base import FileUploader, TableGenerator from sqlsynthgen.unique_generator import UniqueGenerator from sqlsynthgen.main import load_metadata generic = Generic(locale=Locale.EN_GB) +numeric = Numeric() {% for provider_import in provider_imports %} from sqlsynthgen.providers import {{ provider_import }} From 
ec6c749a950684cf96f0b2a4005c7082053f9f52 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 6 Dec 2024 14:43:40 +0000 Subject: [PATCH 07/85] New unique constraint handling. Added WITHOUT TIME ZONE. Removed sqlacodegen dependency. --- poetry.lock | 349 +++++++++++++----------------- pyproject.toml | 1 - sqlsynthgen/main.py | 6 +- sqlsynthgen/make.py | 56 +---- sqlsynthgen/serialize_metadata.py | 8 +- sqlsynthgen/templates/ssg.py.j2 | 68 ++++-- 6 files changed, 220 insertions(+), 268 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1da95e9f..0f73d850 100644 --- a/poetry.lock +++ b/poetry.lock @@ -534,13 +534,13 @@ files = [ [[package]] name = "identify" -version = "2.6.2" +version = "2.6.3" description = "File identification library for Python" optional = false python-versions = ">=3.9" files = [ - {file = "identify-2.6.2-py2.py3-none-any.whl", hash = "sha256:c097384259f49e372f4ea00a19719d95ae27dd5ff0fd77ad630aa891306b82f3"}, - {file = "identify-2.6.2.tar.gz", hash = "sha256:fab5c716c24d7a789775228823797296a2994b075fb6080ac83a102772a98cbd"}, + {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, + {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, ] [package.extras] @@ -594,29 +594,6 @@ perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] -[[package]] -name = "inflect" -version = "7.4.0" -description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles" -optional = false -python-versions = ">=3.8" -files = [ - {file = "inflect-7.4.0-py3-none-any.whl", hash = "sha256:85af0997ee2bda942b1c1eed8c8a827abda91aa3e22d1efaa0eea817f9350ce7"}, - {file = "inflect-7.4.0.tar.gz", hash = "sha256:904baa17cc2cb74827a6c27b95692e95670dadc72b208b3e8c1c05aeed47026b"}, -] - -[package.dependencies] -more-itertools = ">=8.5.0" -typeguard = ">=4.0.1" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["pygments", "pytest (>=6,!=8.1.*)"] -type = ["pytest-mypy"] - [[package]] name = "isort" version = "5.13.2" @@ -833,17 +810,6 @@ files = [ {file = "mimesis-6.1.1.tar.gz", hash = "sha256:044ac378c61db0e06832ff722548fd6e604881d36bc938002e0bd5b85eeb6a98"}, ] -[[package]] -name = "more-itertools" -version = "10.5.0" -description = "More routines for operating on iterables, beyond itertools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"}, - {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"}, -] - [[package]] name = "mypy" version = "1.13.0" @@ -1403,17 +1369,17 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylint" -version = "3.3.1" +version = "3.3.2" description = "python code static checker" optional = false python-versions = ">=3.9.0" files = [ - {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, - {file = "pylint-3.3.1.tar.gz", hash = 
"sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, + {file = "pylint-3.3.2-py3-none-any.whl", hash = "sha256:77f068c287d49b8683cd7c6e624243c74f92890f767f106ffa1ddf3c0a54cb7a"}, + {file = "pylint-3.3.2.tar.gz", hash = "sha256:9ec054ec992cd05ad30a6df1676229739a73f8feeabf3912c995d17601052b01"}, ] [package.dependencies] -astroid = ">=3.3.4,<=3.4.0-dev0" +astroid = ">=3.3.5,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -1598,101 +1564,114 @@ docutils = ">=0.11,<1.0" [[package]] name = "rpds-py" -version = "0.21.0" +version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" files = [ - {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"}, - {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"}, - {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"}, - {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"}, - {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"}, - {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"}, - {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"}, - {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"}, - {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"}, - {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"}, - {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"}, - {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"}, - {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"}, - {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"}, - {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"}, - {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"}, - {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"}, - {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"}, - {file = 
"rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"}, - {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"}, - {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, + {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, + {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, + {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, + {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, + {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, + {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, + {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, + {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, + {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, + {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, + {file = 
"rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, + {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, ] [[package]] @@ -1720,13 +1699,13 @@ toml = ["tomli (>=2.0,<3.0)"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", 
hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -1936,33 +1915,6 @@ lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] -[[package]] -name = "sqlacodegen" -version = "0.0.post331" -description = "Automatic model code generator for SQLAlchemy" -optional = false -python-versions = ">=3.8" -files = [] -develop = false - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.10\""} -inflect = ">=4.0.0" -SQLAlchemy = ">=2.0.29" - -[package.extras] -citext = ["sqlalchemy-citext (>=1.7.0)"] -geoalchemy2 = ["geoalchemy2 (>=0.11.1)"] -pgvector = ["pgvector (>=0.2.4)"] -sqlmodel = ["sqlmodel (>=0.0.12)"] -test = ["coverage (>=7)", "mysql-connector-python", "psycopg2-binary", "pytest (>=7.4)"] - -[package.source] -type = "git" -url = "https://github.com/amacfie/sqlacodegen" -reference = "domain_check_textclause" -resolved_reference = "073c2e7105da79e53163c10d20c8ea7001ca7b66" - [[package]] name = "sqlalchemy" version = "2.0.36" @@ -2088,13 +2040,43 @@ url = ["furl (>=0.4.1)"] [[package]] name = "tomli" -version = "2.1.0" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, - {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = 
"tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] @@ -2110,44 +2092,25 @@ files = [ [[package]] name = "tqdm" -version = "4.67.0" +version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.67.0-py3-none-any.whl", hash = "sha256:0cd8af9d56911acab92182e88d763100d4788bdf421d251616040cc4d44863be"}, - {file = "tqdm-4.67.0.tar.gz", hash = "sha256:fe5a6f95e6fe0b9755e9469b77b9c3cf850048224ecaa8293d7d2d31f97d869a"}, + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, ] [package.dependencies] colorama = {version = 
"*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] discord = ["requests"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] -[[package]] -name = "typeguard" -version = "4.4.1" -description = "Run-time type checker for Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21"}, - {file = "typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -typing-extensions = ">=4.10.0" - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)"] -test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] - [[package]] name = "typer" version = "0.7.0" @@ -2246,13 +2209,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.27.1" +version = "20.28.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.27.1-py3-none-any.whl", hash = "sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4"}, - {file = "virtualenv-20.27.1.tar.gz", hash = "sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba"}, + {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, + {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, ] [package.dependencies] @@ -2289,4 +2252,4 @@ docs = ["sphinx-rtd-theme", "sphinxcontrib-napoleon"] [metadata] lock-version = "2.0" python-versions = "^3.9,<3.13" -content-hash = "d49ad6e8894cc350891ad8b9eaa2c0d1cae82842ec9227712955fdc8fa821900" +content-hash = "c57b2c743b80129962da69ee0603e82f561f41c641bfa897eb58ffd606d43bc6" diff --git a/pyproject.toml b/pyproject.toml index 8015144a..ce3fb6fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,6 @@ smartnoise-sql = "^1" jinja2 = "^3.1.2" black = "^23.3.0" jsonschema = "^4.17.3" -sqlacodegen = { git = "https://github.com/amacfie/sqlacodegen", branch = "domain_check_textclause" } asyncpg = "^0.30.0" greenlet = "^3.1.1" pymysql = "^1.1.0" diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 0446391b..7dbb9b93 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -266,11 +266,7 @@ def make_tables( force: bool = Option(False, "--force", "-f"), verbose: bool = Option(False, "--verbose", "-v"), ) -> None: - """Make a SQLAlchemy file of Table classes. - - This CLI command deploys sqlacodegen to discover a - schema structure, and generates an object relational model declared - as Python classes. + """Make a YAML file representing the tables in the schema. 
Example: $ sqlsynthgen make_tables diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 16ae00a5..11a3bd49 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -72,6 +72,7 @@ class TableGeneratorInfo: class_name: str table_name: str + columns: list[str] rows_per_pass: int row_gens: list[RowGeneratorInfo] = field(default_factory=list) unique_constraints: list[UniqueConstraint] = field(default_factory=list) @@ -301,47 +302,6 @@ def _get_provider_for_column(column: Column) -> Tuple[list[str], str, dict[str, return variable_names, generator_function, generator_arguments -def _enforce_unique_constraints(table_data: TableGeneratorInfo) -> None: - """Wrap row generators of `table_data` in `UniqueGenerator`s to enforce constraints. - - The given `table_data` is modified in place. - """ - # For each row generator that assigns values to a column that has a unique - # constraint, wrap it in a UniqueGenerator that ensures the values generated are - # unique. - for row_gen in table_data.row_gens: - # Set of column names that this row_gen assigns to. - row_gen_column_set = set(row_gen.variable_names) - for constraint in table_data.unique_constraints: - # Set of column names that this constraint affects. - constraint_column_set = set(c.name for c in constraint.columns) - if not constraint_column_set & row_gen_column_set: - # The intersection is empty, this constraint isn't relevant for this - # row_gen. - continue - if not constraint_column_set.issubset(row_gen_column_set): - msg = ( - "A unique constraint (%s) isn't fully covered by one row " - "generator (%s). Enforcement of the constraint may not work." - ) - logger.warning(msg, constraint.name, row_gen.variable_names) - - # Make a new function call that wraps the old one in a UniqueGenerator - old_function_call = row_gen.function_call - new_arguments = [ - "dst_db_conn", - str(row_gen.variable_names), - old_function_call.function_name, - ] + old_function_call.argument_values - # The self.unique_{constraint_name} will be a UniqueGenerator, initialized - # in the __init__ of the table generator. - new_function_call = FunctionCall( - function_name=f"self.unique_{constraint.name}", - argument_values=new_arguments, - ) - row_gen.function_call = new_function_call - - def _constraint_sort_key(constraint: UniqueConstraint) -> str: """Extract a string out of a UniqueConstraint that is unique to that constraint. @@ -390,6 +350,7 @@ def _get_generator_for_table( table_data: TableGeneratorInfo = TableGeneratorInfo( table_name=table.name, class_name=table.name.title() + "Generator", + columns=[str(col.name) for col in table.columns], rows_per_pass=table_config.get("num_rows_per_pass", 1), unique_constraints=unique_constraints, ) @@ -402,7 +363,6 @@ def _get_generator_for_table( # No generator for this column in the user config. table_data.row_gens.append(_get_default_generator(column)) - _enforce_unique_constraints(table_data) return table_data @@ -433,7 +393,11 @@ def make_table_generators( # pylint: disable=too-many-locals src_stats_filename: Optional[str], overwrite_files: bool = False, ) -> str: - """Create sqlsynthgen generator classes from a sqlacodegen-generated file. + """Create sqlsynthgen generator classes. + + Currently the SRC_DSN environment variable is still required + so that vocabulary tables can be generated. This will be removed + into a different command. Args: config: Configuration to control the generator creation. 
@@ -525,9 +489,8 @@ def _get_generator_for_vocabulary_table( def make_tables_file( db_dsn: str, schema_name: Optional[str], config: Mapping[str, Any] ) -> str: - """Write a file with the SQLAlchemy ORM classes. - - Exits with an error if sqlacodegen is unsuccessful. + """ + Construct the YAML file representing the schema. """ tables_config = config.get("tables", {}) engine = get_sync_engine(create_db_engine(db_dsn, schema_name=schema_name)) @@ -537,7 +500,6 @@ def reflect_if(table_name: str, _: Any) -> bool: ignore = table_config.get("ignore", False) return not ignore - schemae = sqlalchemy.inspect(engine).get_schema_names() metadata = MetaData() metadata.reflect( engine, diff --git a/sqlsynthgen/serialize_metadata.py b/sqlsynthgen/serialize_metadata.py index 33e0d580..87a40669 100644 --- a/sqlsynthgen/serialize_metadata.py +++ b/sqlsynthgen/serialize_metadata.py @@ -59,15 +59,17 @@ def time_type(type_, pg_type): @parsy.generate(type_.__name__) def pgt_parser(): """ - Parses TYPE_NAME, TYPE_NAME(32), TYPE_NAME WITH TIMEZONE - or TYPE_NAME(32) WITH TIMEZONE + Parses TYPE_NAME, TYPE_NAME(32), TYPE_NAME WITH TIME ZONE + or TYPE_NAME(32) WITH TIME ZONE """ yield parsy.string(type_.__name__) precision: int | None = yield ( parsy.string("(") >> integer() << parsy.string(")") ).optional() timezone: str | None = yield ( - parsy.string(" WITH TIME ZONE").result(True) + parsy.string(" WITH") >> ( + parsy.string(" ").result(True) | parsy.string("OUT ").result(False) + ) << parsy.string("TIME ZONE") ).optional(False) if precision is None and not timezone: # normal sql type diff --git a/sqlsynthgen/templates/ssg.py.j2 b/sqlsynthgen/templates/ssg.py.j2 index 27d9735c..7b0e9781 100644 --- a/sqlsynthgen/templates/ssg.py.j2 +++ b/sqlsynthgen/templates/ssg.py.j2 @@ -1,8 +1,8 @@ """This file was auto-generated by sqlsynthgen but can be edited manually.""" from mimesis import Generic, Numeric from mimesis.locales import Locale +import sqlalchemy from sqlsynthgen.base import FileUploader, TableGenerator -from sqlsynthgen.unique_generator import UniqueGenerator from sqlsynthgen.main import load_metadata generic = Generic(locale=Locale.EN_GB) @@ -37,27 +37,57 @@ class {{ table_data.class_name }}(TableGenerator): num_rows_per_pass = {{ table_data.rows_per_pass }} def __init__(self): - pass - {% for constraint in table_data.unique_constraints %} - self.unique_{{constraint.name}} = UniqueGenerator([ - {% for col in constraint.columns %} - "{{col.name}}"{%- if not loop.last %}, {% endif %} - {% endfor %} - ], - "{{table_data.table_name}}", - {% if max_unique_constraint_tries is not none %} - max_tries={{max_unique_constraint_tries}}, - {% endif %} - ) - {% endfor %} + self.initialized = False def __call__(self, dst_db_conn): + if not self.initialized: + {% for constraint in table_data.unique_constraints %} + query_text = f"SELECT {% + for col in constraint.columns + %}{{col.name}}{%- if not loop.last %}, {% endif %}{% + endfor + %} FROM {{table_data.table_name}}" + query_result = dst_db_conn.execute(sqlalchemy.text(query_text)).fetchall() + self.{{constraint.name}}_existing = set([ + hash(tuple(result)) + for result in query_result + ]) + {% endfor %} + self.initialized = True result = {} - {% for row_gen in table_data.row_gens %} - {% for vn in row_gen.variable_names %} - result["{{vn}}"]{%- if not loop.last %}, {% endif %} - {% endfor %} = {{ row_gen.function_call.function_name }}({{ row_gen.function_call.argument_values| join(", ") }}) - {% endfor %} + columns_to_generate = set({{ table_data.columns }}) + {% if 
max_unique_constraint_tries is not none %} + max_tries = {{max_unique_constraint_tries}} + {% endif %} + while columns_to_generate: + {% if max_unique_constraint_tries is not none %} + if max_tries == 0: + raise RuntimeError(f"Failed to satisfy unique constraints for table {{table_data.table_name}} after {{ max_unique_constraint_tries }} attempts.") + max_tries -= 1 + {% endif %} + {% for row_gen in table_data.row_gens %} + if ({% for vn in row_gen.variable_names %} + "{{vn}}" in columns_to_generate{%- if not loop.last %} or{% endif %} + {% endfor %} + ): + {% for vn in row_gen.variable_names %} + result["{{vn}}"]{%- if not loop.last %}, {% endif %} + {% endfor %} = {{ row_gen.function_call.function_name }}({{ row_gen.function_call.argument_values| join(", ") }}) + {% endfor %} + columns_to_generate = set() + {% for constraint in table_data.unique_constraints %} + {{constraint.name}}_hash = hash(( + {% for col in constraint.columns %} + result["{{col.name}}"], + {% endfor %} + )) + if {{constraint.name}}_hash not in self.{{constraint.name}}_existing: + self.{{constraint.name}}_existing.add({{constraint.name}}_hash) + else: + {% for col in constraint.columns %} + columns_to_generate.add("{{col.name}}") + {% endfor %} + {% endfor %} return result {% endfor %} From 9421fba2c270496cfb8125ec8a190099b5c3e80e Mon Sep 17 00:00:00 2001 From: Tim Band Date: Mon, 9 Dec 2024 19:37:59 +0000 Subject: [PATCH 08/85] Vocab table generation moved into new command make-vocab --- sqlsynthgen/main.py | 50 ++++++++++++++++++++++++++----- sqlsynthgen/make.py | 70 ++++++++++++++++++++++++++++++++++---------- sqlsynthgen/utils.py | 5 +++- 3 files changed, 101 insertions(+), 24 deletions(-) diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 7dbb9b93..ac8b11d9 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -4,7 +4,6 @@ import sys from importlib import metadata from pathlib import Path -from types import ModuleType from typing import Final, Optional import yaml @@ -13,13 +12,17 @@ from typer import Option, Typer from sqlsynthgen.create import create_db_data, create_db_tables, create_db_vocab -from sqlsynthgen.make import make_src_stats, make_table_generators, make_tables_file +from sqlsynthgen.make import ( + make_src_stats, + make_table_generators, + make_tables_file, + make_vocabulary_tables, +) from sqlsynthgen.remove import remove_db_data, remove_db_tables, remove_db_vocab from sqlsynthgen.settings import Settings, get_settings from sqlsynthgen.utils import ( CONFIG_SCHEMA_PATH, conf_logger, - get_orm_metadata, import_file, logger, read_config_file, @@ -174,6 +177,42 @@ def create_tables( logger.debug("Tables created.") +@app.command() +def make_vocab( + orm_file: str = Option(ORM_FILENAME), + config_file: Optional[str] = Option(None), + force: bool = Option(True, "--force", "-f"), + verbose: bool = Option(False, "--verbose", "-v"), +) -> None: + """Make files of vocabulary tables. + + Each table marked in the configuration file as "vocabulary_table: true" is downloaded to a YAML file. + + Example: + $ sqlsynthgen make-vocab --config-file config.yml + + Args: + orm_file (str): Name of the ORM YAML file. + Must be in the current working directory. + ssg_file (str): Path to write the generators file to. + config_file (str): Path to configuration file. + stats_file (str): Path to source stats file (output of make-stats). + force (bool): Overwrite any existing vocabulary file. Defaults to True. + verbose (bool): Be verbose. Defaults to False. 
+ """ + conf_logger(verbose) + settings = get_settings() + _require_src_db_dsn(settings) + + generator_config = read_config_file(config_file) if config_file is not None else {} + orm_metadata = load_metadata(orm_file, generator_config) + make_vocabulary_tables( + orm_metadata, + generator_config, + overwrite_files=force + ) + + @app.command() def make_generators( orm_file: str = Option(ORM_FILENAME), @@ -197,7 +236,7 @@ def make_generators( ssg_file (str): Path to write the generators file to. config_file (str): Path to configuration file. stats_file (str): Path to source stats file (output of make-stats). - force (bool): Overwrite the ORM file if exists. Default to False. + force (bool): Overwrite the ssg.py file if exists. Defaults to False. verbose (bool): Be verbose. Default to False. """ conf_logger(verbose) @@ -207,8 +246,6 @@ def make_generators( if not force: _check_file_non_existence(ssg_file_path) settings = get_settings() - # Check that src_dsn is set, even though we don't need it here. - _require_src_db_dsn(settings) generator_config = read_config_file(config_file) if config_file is not None else {} orm_metadata = load_metadata(orm_file, generator_config) @@ -218,7 +255,6 @@ def make_generators( orm_file, config_file, stats_file, - overwrite_files=force ) ssg_file_path.write_text(result, encoding="utf-8") diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 11a3bd49..9f42c0fa 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -44,7 +44,6 @@ class VocabularyTableGeneratorInfo: """Contains the ssg.py content related to vocabulary tables.""" variable_name: str - class_name: str table_name: str dictionary_entry: str @@ -385,19 +384,41 @@ def _get_story_generators(config: Mapping) -> list[StoryGeneratorInfo]: return generators +def make_vocabulary_tables( + metadata: MetaData, + config: Mapping, + overwrite_files: bool, +): + """ + Extracts the data from the source database for each + vocabulary table. + """ + settings = get_settings() + src_dsn: str = settings.src_dsn or "" + assert src_dsn != "", "Missing SRC_DSN setting." + + engine = get_sync_engine(create_db_engine(src_dsn, schema_name=settings.src_schema)) + tables_config = config.get("tables", {}) + for table in metadata.sorted_tables: + table_config = tables_config.get(table.name, {}) + + if table_config.get("vocabulary_table", False): + _generate_vocabulary_table( + table, engine, overwrite_files=overwrite_files + ) + + def make_table_generators( # pylint: disable=too-many-locals metadata: MetaData, config: Mapping, orm_filename: str, config_filename: str, src_stats_filename: Optional[str], - overwrite_files: bool = False, ) -> str: """Create sqlsynthgen generator classes. - Currently the SRC_DSN environment variable is still required - so that vocabulary tables can be generated. This will be removed - into a different command. + The orm and vocabulary YAML files must already have been + generated (by make-tables and make-vocab). Args: config: Configuration to control the generator creation. 
@@ -423,10 +444,10 @@ for table in metadata.sorted_tables: table_config = tables_config.get(table.name, {}) - if table_config.get("vocabulary_table") is True: + if table_config.get("vocabulary_table", False): vocabulary_tables.append( - _get_generator_for_vocabulary_table( - table, engine, overwrite_files=overwrite_files + _get_generator_for_existing_vocabulary_table( + table, engine ) ) else: @@ -464,20 +485,21 @@ def generate_ssg_content(template_context: Mapping[str, Any]) -> str: return format_str(template_output, mode=FileMode()) -def _get_generator_for_vocabulary_table( +def _get_generator_for_existing_vocabulary_table( table: Table, engine: Engine, table_file_name: Optional[str] = None, - overwrite_files: bool = False, ) -> VocabularyTableGeneratorInfo: + """ + Turns an existing vocabulary YAML file into a VocabularyTableGeneratorInfo. + """ yaml_file_name: str = table_file_name or table.fullname + ".yaml" - if Path(yaml_file_name).exists() and not overwrite_files: - logger.error("%s already exists. Exiting...", yaml_file_name) + if not Path(yaml_file_name).exists(): + logger.error("%s has not already been generated, please run make-vocab first", yaml_file_name) sys.exit(1) - else: - logger.debug("Downloading vocabulary table %s", table.name) - download_table(table, engine, yaml_file_name) - logger.debug("Done downloading %s", table.name) + logger.debug("Downloading vocabulary table %s", table.name) + download_table(table, engine, yaml_file_name) + logger.debug("Done downloading %s", table.name) return VocabularyTableGeneratorInfo( dictionary_entry=table.name, @@ -486,6 +508,22 @@ def _get_generator_for_existing_vocabulary_table( ) +def _generate_vocabulary_table( + table: Table, + engine: Engine, + overwrite_files: bool = False, +): + """ + Pulls data out of the source database to make a vocabulary YAML file + """ + yaml_file_name: str = table.fullname + ".yaml" + if Path(yaml_file_name).exists() and not overwrite_files: + logger.debug("%s already exists; not overwriting", yaml_file_name) + return + logger.debug("Downloading vocabulary table %s", table.name) + download_table(table, engine, yaml_file_name) + + def make_tables_file( db_dsn: str, schema_name: Optional[str], config: Mapping[str, Any] ) -> str: diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index ce16ec53..d63dc965 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -77,7 +77,10 @@ def download_table( """Download a Table and store it as a .yaml file.""" stmt = select(table) with engine.connect() as conn: - result = [dict(row) for row in conn.execute(stmt).mappings()] + result = [ + {str(col_name): value for (col_name, value) in row.items()} + for row in conn.execute(stmt).mappings() + ] with Path(yaml_file_name).open("w", newline="", encoding="utf-8") as yamlfile: yamlfile.write(yaml.dump(result)) From 9ea0c1e826e2abcb0bb70aede04624dde88695c7 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 11 Dec 2024 11:23:42 +0000 Subject: [PATCH 09/85] Using Typer more conventionally now --- poetry.lock | 131 ++++++++++++++++++------------------ pyproject.toml | 2 +- sqlsynthgen/main.py | 159 ++++++++++++++++---------------------------- sqlsynthgen/make.py | 1 - 4 files changed, 124 insertions(+), 169 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0f73d850..334264a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -23,13 +23,13 @@ files = [ [[package]] name = "astroid" -version = "3.3.5" +version = "3.3.6" description = "An abstract syntax tree for 
Python with inference support." optional = false python-versions = ">=3.9.0" files = [ - {file = "astroid-3.3.5-py3-none-any.whl", hash = "sha256:a9d1c946ada25098d790e079ba2a1b112157278f3fb7e718ae6a9252f5835dc8"}, - {file = "astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d"}, + {file = "astroid-3.3.6-py3-none-any.whl", hash = "sha256:db676dc4f3ae6bfe31cda227dc60e03438378d7a896aec57422c95634e8d722f"}, + {file = "astroid-3.3.6.tar.gz", hash = "sha256:6aaea045f938c735ead292204afdb977a36e989522b7833ef6fea94de743f442"}, ] [package.dependencies] @@ -941,66 +941,66 @@ files = [ [[package]] name = "numpy" -version = "2.1.3" +version = "2.2.0" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" files = [ - {file = "numpy-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c894b4305373b9c5576d7a12b473702afdf48ce5369c074ba304cc5ad8730dff"}, - {file = "numpy-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b47fbb433d3260adcd51eb54f92a2ffbc90a4595f8970ee00e064c644ac788f5"}, - {file = "numpy-2.1.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:825656d0743699c529c5943554d223c021ff0494ff1442152ce887ef4f7561a1"}, - {file = "numpy-2.1.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a4825252fcc430a182ac4dee5a505053d262c807f8a924603d411f6718b88fd"}, - {file = "numpy-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e711e02f49e176a01d0349d82cb5f05ba4db7d5e7e0defd026328e5cfb3226d3"}, - {file = "numpy-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78574ac2d1a4a02421f25da9559850d59457bac82f2b8d7a44fe83a64f770098"}, - {file = "numpy-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c7662f0e3673fe4e832fe07b65c50342ea27d989f92c80355658c7f888fcc83c"}, - {file = "numpy-2.1.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fa2d1337dc61c8dc417fbccf20f6d1e139896a30721b7f1e832b2bb6ef4eb6c4"}, - {file = "numpy-2.1.3-cp310-cp310-win32.whl", hash = "sha256:72dcc4a35a8515d83e76b58fdf8113a5c969ccd505c8a946759b24e3182d1f23"}, - {file = "numpy-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:ecc76a9ba2911d8d37ac01de72834d8849e55473457558e12995f4cd53e778e0"}, - {file = "numpy-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4d1167c53b93f1f5d8a139a742b3c6f4d429b54e74e6b57d0eff40045187b15d"}, - {file = "numpy-2.1.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c80e4a09b3d95b4e1cac08643f1152fa71a0a821a2d4277334c88d54b2219a41"}, - {file = "numpy-2.1.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:576a1c1d25e9e02ed7fa5477f30a127fe56debd53b8d2c89d5578f9857d03ca9"}, - {file = "numpy-2.1.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:973faafebaae4c0aaa1a1ca1ce02434554d67e628b8d805e61f874b84e136b09"}, - {file = "numpy-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:762479be47a4863e261a840e8e01608d124ee1361e48b96916f38b119cfda04a"}, - {file = "numpy-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6f24b3d1ecc1eebfbf5d6051faa49af40b03be1aaa781ebdadcbc090b4539b"}, - {file = "numpy-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:17ee83a1f4fef3c94d16dc1802b998668b5419362c8a4f4e8a491de1b41cc3ee"}, - {file = "numpy-2.1.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15cb89f39fa6d0bdfb600ea24b250e5f1a3df23f901f51c8debaa6a5d122b2f0"}, - {file = "numpy-2.1.3-cp311-cp311-win32.whl", hash = 
"sha256:d9beb777a78c331580705326d2367488d5bc473b49a9bc3036c154832520aca9"}, - {file = "numpy-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:d89dd2b6da69c4fff5e39c28a382199ddedc3a5be5390115608345dec660b9e2"}, - {file = "numpy-2.1.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f55ba01150f52b1027829b50d70ef1dafd9821ea82905b63936668403c3b471e"}, - {file = "numpy-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13138eadd4f4da03074851a698ffa7e405f41a0845a6b1ad135b81596e4e9958"}, - {file = "numpy-2.1.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a6b46587b14b888e95e4a24d7b13ae91fa22386c199ee7b418f449032b2fa3b8"}, - {file = "numpy-2.1.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:0fa14563cc46422e99daef53d725d0c326e99e468a9320a240affffe87852564"}, - {file = "numpy-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8637dcd2caa676e475503d1f8fdb327bc495554e10838019651b76d17b98e512"}, - {file = "numpy-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2312b2aa89e1f43ecea6da6ea9a810d06aae08321609d8dc0d0eda6d946a541b"}, - {file = "numpy-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a38c19106902bb19351b83802531fea19dee18e5b37b36454f27f11ff956f7fc"}, - {file = "numpy-2.1.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02135ade8b8a84011cbb67dc44e07c58f28575cf9ecf8ab304e51c05528c19f0"}, - {file = "numpy-2.1.3-cp312-cp312-win32.whl", hash = "sha256:e6988e90fcf617da2b5c78902fe8e668361b43b4fe26dbf2d7b0f8034d4cafb9"}, - {file = "numpy-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:0d30c543f02e84e92c4b1f415b7c6b5326cbe45ee7882b6b77db7195fb971e3a"}, - {file = "numpy-2.1.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96fe52fcdb9345b7cd82ecd34547fca4321f7656d500eca497eb7ea5a926692f"}, - {file = "numpy-2.1.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f653490b33e9c3a4c1c01d41bc2aef08f9475af51146e4a7710c450cf9761598"}, - {file = "numpy-2.1.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dc258a761a16daa791081d026f0ed4399b582712e6fc887a95af09df10c5ca57"}, - {file = "numpy-2.1.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:016d0f6f5e77b0f0d45d77387ffa4bb89816b57c835580c3ce8e099ef830befe"}, - {file = "numpy-2.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c181ba05ce8299c7aa3125c27b9c2167bca4a4445b7ce73d5febc411ca692e43"}, - {file = "numpy-2.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5641516794ca9e5f8a4d17bb45446998c6554704d888f86df9b200e66bdcce56"}, - {file = "numpy-2.1.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ea4dedd6e394a9c180b33c2c872b92f7ce0f8e7ad93e9585312b0c5a04777a4a"}, - {file = "numpy-2.1.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0df3635b9c8ef48bd3be5f862cf71b0a4716fa0e702155c45067c6b711ddcef"}, - {file = "numpy-2.1.3-cp313-cp313-win32.whl", hash = "sha256:50ca6aba6e163363f132b5c101ba078b8cbd3fa92c7865fd7d4d62d9779ac29f"}, - {file = "numpy-2.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:747641635d3d44bcb380d950679462fae44f54b131be347d5ec2bce47d3df9ed"}, - {file = "numpy-2.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:996bb9399059c5b82f76b53ff8bb686069c05acc94656bb259b1d63d04a9506f"}, - {file = "numpy-2.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:45966d859916ad02b779706bb43b954281db43e185015df6eb3323120188f9e4"}, - {file = "numpy-2.1.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = 
"sha256:baed7e8d7481bfe0874b566850cb0b85243e982388b7b23348c6db2ee2b2ae8e"}, - {file = "numpy-2.1.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f7f672a3388133335589cfca93ed468509cb7b93ba3105fce780d04a6576a0"}, - {file = "numpy-2.1.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7aac50327da5d208db2eec22eb11e491e3fe13d22653dce51b0f4109101b408"}, - {file = "numpy-2.1.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4394bc0dbd074b7f9b52024832d16e019decebf86caf909d94f6b3f77a8ee3b6"}, - {file = "numpy-2.1.3-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:50d18c4358a0a8a53f12a8ba9d772ab2d460321e6a93d6064fc22443d189853f"}, - {file = "numpy-2.1.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:14e253bd43fc6b37af4921b10f6add6925878a42a0c5fe83daee390bca80bc17"}, - {file = "numpy-2.1.3-cp313-cp313t-win32.whl", hash = "sha256:08788d27a5fd867a663f6fc753fd7c3ad7e92747efc73c53bca2f19f8bc06f48"}, - {file = "numpy-2.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2564fbdf2b99b3f815f2107c1bbc93e2de8ee655a69c261363a1172a79a257d4"}, - {file = "numpy-2.1.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4f2015dfe437dfebbfce7c85c7b53d81ba49e71ba7eadbf1df40c915af75979f"}, - {file = "numpy-2.1.3-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3522b0dfe983a575e6a9ab3a4a4dfe156c3e428468ff08ce582b9bb6bd1d71d4"}, - {file = "numpy-2.1.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c006b607a865b07cd981ccb218a04fc86b600411d83d6fc261357f1c0966755d"}, - {file = "numpy-2.1.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e14e26956e6f1696070788252dcdff11b4aca4c3e8bd166e0df1bb8f315a67cb"}, - {file = "numpy-2.1.3.tar.gz", hash = "sha256:aa08e04e08aaf974d4458def539dece0d28146d866a39da5639596f4921fd761"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, + {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, + {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, + {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, + {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, + {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, + {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, + {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, + {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, + {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, + {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, + {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, ] [[package]] @@ -2113,23 +2113,24 @@ telegram = ["requests"] [[package]] name = "typer" -version = "0.7.0" +version = "0.9.4" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
optional = false python-versions = ">=3.6" files = [ - {file = "typer-0.7.0-py3-none-any.whl", hash = "sha256:b5e704f4e48ec263de1c0b3a2387cd405a13767d2f907f44c1a08cbad96f606d"}, - {file = "typer-0.7.0.tar.gz", hash = "sha256:ff797846578a9f2a201b53442aedeb543319466870fbe1c701eab66dd7681165"}, + {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"}, + {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"}, ] [package.dependencies] click = ">=7.1.1,<9.0.0" +typing-extensions = ">=3.7.4.3" [package.extras] -all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] [[package]] name = "types-docutils" @@ -2252,4 +2253,4 @@ docs = ["sphinx-rtd-theme", "sphinxcontrib-napoleon"] [metadata] lock-version = "2.0" python-versions = "^3.9,<3.13" -content-hash = "c57b2c743b80129962da69ee0603e82f561f41c641bfa897eb58ffd606d43bc6" +content-hash = "2240d60cdeb415bf60f58ada560652991673f18581bbe16f085cb490ec26a711" diff --git a/pyproject.toml b/pyproject.toml index ce3fb6fd..b56cc864 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,7 @@ pydantic = {extras = ["dotenv"], version = "^1.10.2"} psycopg2-binary = "^2.9.5" sqlalchemy-utils = "^0.41.2" mimesis = "^6.1.1" -typer = "^0.7.0" +typer = "^0.9.0" pyyaml = "^6.0" sqlalchemy = "^2" sphinx-rtd-theme = {version = "^1.2.0", optional = true} diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index ac8b11d9..8daa41c1 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -9,7 +9,7 @@ import yaml from jsonschema.exceptions import ValidationError from jsonschema.validators import validate -from typer import Option, Typer +from typer import Argument, Option, Typer from sqlsynthgen.create import create_db_data, create_db_tables, create_db_vocab from sqlsynthgen.make import ( @@ -67,15 +67,27 @@ def load_metadata(orm_file_name, config=None): return dict_to_metadata(meta_dict) +@app.callback() +def main(verbose: bool = Option( + False, + "--verbose", + "-v", + help="Print more information." +)): + conf_logger(verbose) + + @app.command() def create_data( - orm_file: str = Option(ORM_FILENAME), - ssg_file: str = Option(SSG_FILENAME), - config_file: Optional[str] = Option(None), - num_passes: int = Option(1), - verbose: bool = Option(False, "--verbose", "-v"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), + ssg_file: str = Option( + SSG_FILENAME, + help="The name of the generators file. Must be in the current working directory." 
+ ), + config_file: Optional[str] = Option(None, help="The configuration file"), + num_passes: int = Option(1, help="Number of passes (rows or stories) to make"), ) -> None: - """Populate schema with synthetic data. + """Populate the schema in the target directory with synthetic data. This CLI command generates synthetic data for Python table structures, and inserts these rows @@ -92,17 +104,7 @@ def create_data( Example: $ sqlsynthgen create-data - - Args: - orm_file (str): Name of YAML ORM file. - Must be in the current working directory. - ssg_file (str): Name of generators file. - Must be in the current working directory. - config_file (str): Path to configuration file. - num_passes (int): Number of passes to make. - verbose (bool): Be verbose. Default to False. """ - conf_logger(verbose) logger.debug("Creating data.") orm_metadata = load_metadata(orm_file, config_file) ssg_module = import_file(ssg_file) @@ -128,20 +130,16 @@ def create_data( @app.command() def create_vocab( - ssg_file: str = Option(SSG_FILENAME), - verbose: bool = Option(False, "--verbose", "-v"), + ssg_file: str = Option( + SSG_FILENAME, + help="The name of the generators file. Must be in the current working directory." + ), ) -> None: - """Import vocabulary data. + """Import vocabulary data into the target database. Example: $ sqlsynthgen create-vocab - - Args: - ssg_file (str): Name of generators file. - Must be in the current working directory. - verbose (bool): Be verbose. Default to False. """ - conf_logger(verbose) logger.debug("Loading vocab.") ssg_module = import_file(ssg_file) create_db_vocab(ssg_module.vocab_dict) @@ -151,9 +149,8 @@ def create_vocab( @app.command() def create_tables( - orm_file: str = Option(ORM_FILENAME), - config_file: Optional[str] = Option(None), - verbose: bool = Option(False, "--verbose", "-v"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), + config_file: Optional[str] = Option(None, help="The configuration file"), ) -> None: """Create schema from the ORM YAML file. @@ -162,14 +159,7 @@ def create_tables( Example: $ sqlsynthgen create-tables - - Args: - orm_file (str): Name of Python ORM file. - Must be in the current working directory. - config_file (str): Path to configuration file. - verbose (bool): Be verbose. Default to False. """ - conf_logger(verbose) logger.debug("Creating tables.") config = read_config_file(config_file) if config_file is not None else {} orm_metadata = load_metadata(orm_file, config) @@ -179,10 +169,9 @@ def create_tables( @app.command() def make_vocab( - orm_file: str = Option(ORM_FILENAME), - config_file: Optional[str] = Option(None), - force: bool = Option(True, "--force", "-f"), - verbose: bool = Option(False, "--verbose", "-v"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), + config_file: Optional[str] = Option(None, help="The configuration file"), + force: bool = Option(True, help="Overwrite any existing vocabulary file."), ) -> None: """Make files of vocabulary tables. @@ -190,17 +179,7 @@ def make_vocab( Example: $ sqlsynthgen make-vocab --config-file config.yml - - Args: - orm_file (str): Name of Python ORM file. - Must be in the current working directory. - ssg_file (str): Path to write the generators file to. - config_file (str): Path to configuration file. - stats_file (str): Path to source stats file (output of make-stats). - force (bool): Overwrite any existing vocabulary file. Default to True. - verbose (bool): Be verbose. Default to False. 
""" - conf_logger(verbose) settings = get_settings() _require_src_db_dsn(settings) @@ -215,12 +194,11 @@ def make_vocab( @app.command() def make_generators( - orm_file: str = Option(ORM_FILENAME), - ssg_file: str = Option(SSG_FILENAME), - config_file: Optional[str] = Option(None), - stats_file: Optional[str] = Option(None), - force: bool = Option(False, "--force", "-f"), - verbose: bool = Option(False, "--verbose", "-v"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), + ssg_file: str = Option(SSG_FILENAME, help="Path to write Python generators to."), + config_file: Optional[str] = Option(None, help="The configuration file"), + stats_file: Optional[str] = Option(None, help="Statistics file (output of make-stats)"), + force: bool = Option(False, help="Overwrite any existing Python generators file."), ) -> None: """Make a SQLSynthGen file of generator classes. @@ -229,23 +207,12 @@ def make_generators( Example: $ sqlsynthgen make-generators - - Args: - orm_file (str): Name of Python ORM file. - Must be in the current working directory. - ssg_file (str): Path to write the generators file to. - config_file (str): Path to configuration file. - stats_file (str): Path to source stats file (output of make-stats). - force (bool): Overwrite the ssg.py file if exists. Defaults to False. - verbose (bool): Be verbose. Default to False. """ - conf_logger(verbose) logger.debug("Making %s.", ssg_file) ssg_file_path = Path(ssg_file) if not force: _check_file_non_existence(ssg_file_path) - settings = get_settings() generator_config = read_config_file(config_file) if config_file is not None else {} orm_metadata = load_metadata(orm_file, generator_config) @@ -264,10 +231,9 @@ def make_generators( @app.command() def make_stats( - config_file: str = Option(...), + config_file: Optional[str] = Option(None, help="The configuration file"), stats_file: str = Option(STATS_FILENAME), - force: bool = Option(False, "--force", "-f"), - verbose: bool = Option(False, "--verbose", "-v"), + force: bool = Option(False, help="Overwrite any existing vocabulary file."), ) -> None: """Compute summary statistics from the source database. @@ -276,7 +242,6 @@ def make_stats( Example: $ sqlsynthgen make_stats --config-file=example_config.yaml """ - conf_logger(verbose) logger.debug("Creating %s.", stats_file) stats_file_path = Path(stats_file) @@ -297,23 +262,15 @@ def make_stats( @app.command() def make_tables( - config_file: Optional[str] = Option(None), - orm_file: str = Option(ORM_FILENAME), - force: bool = Option(False, "--force", "-f"), - verbose: bool = Option(False, "--verbose", "-v"), + config_file: Optional[str] = Option(None, help="The configuration file"), + orm_file: str = Option(ORM_FILENAME, help="Path to write the ORM yaml file to"), + force: bool = Option(False, help="Overwrite any existing orm yaml file."), ) -> None: """Make a YAML file representing the tables in the schema. Example: $ sqlsynthgen make_tables - - Args: - config_file (str): Path to configuration file. - orm_file (str): Path to write the Python YAML file. - force (bool): Overwrite YAML file, if exists. Default to False. - verbose (bool): Be verbose. Default to False. 
""" - conf_logger(verbose) logger.debug("Creating %s.", orm_file) orm_file_path = Path(orm_file) @@ -331,11 +288,9 @@ def make_tables( @app.command() def validate_config( - config_file: Path, - verbose: bool = Option(False, "--verbose", "-v"), + config_file: Path = Argument(help="The configuration file to validate"), ) -> None: """Validate the format of a config file.""" - conf_logger(verbose) logger.debug("Validating config file: %s.", config_file) config = yaml.load(config_file.read_text(encoding="UTF-8"), Loader=yaml.SafeLoader) @@ -350,14 +305,15 @@ def validate_config( @app.command() def remove_data( - orm_file: str = Option(ORM_FILENAME), - ssg_file: str = Option(SSG_FILENAME), - config_file: Optional[str] = Option(None), - yes: bool = Option(False, "--yes", prompt="Are you sure?"), - verbose: bool = Option(False, "--verbose", "-v"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), + ssg_file: str = Option( + SSG_FILENAME, + help="The name of the generators file. Must be in the current working directory." + ), + config_file: Optional[str] = Option(None, help="The configuration file"), + yes: bool = Option(False, "--yes", prompt="Are you sure?", help="Just remove, don't ask first"), ) -> None: """Truncate non-vocabulary tables in the destination schema.""" - conf_logger(verbose) if yes: logger.debug("Truncating non-vocabulary tables.") config = read_config_file(config_file) if config_file is not None else {} @@ -371,14 +327,15 @@ def remove_data( @app.command() def remove_vocab( - orm_file: str = Option(ORM_FILENAME), - ssg_file: str = Option(SSG_FILENAME), - config_file: Optional[str] = Option(None), - yes: bool = Option(False, "--yes", prompt="Are you sure?"), - verbose: bool = Option(False, "--verbose", "-v"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), + ssg_file: str = Option( + SSG_FILENAME, + help="The name of the generators file. Must be in the current working directory." + ), + config_file: Optional[str] = Option(None, help="The configuration file"), + yes: bool = Option(False, "--yes", prompt="Are you sure?", help="Just remove, don't ask first"), ) -> None: """Truncate vocabulary tables in the destination schema.""" - conf_logger(verbose) if yes: logger.debug("Truncating vocabulary tables.") config = read_config_file(config_file) if config_file is not None else {} @@ -392,16 +349,14 @@ def remove_vocab( @app.command() def remove_tables( - orm_file: str = Option(ORM_FILENAME), - config_file: Optional[str] = Option(None), - yes: bool = Option(False, "--yes", prompt="Are you sure?"), - verbose: bool = Option(False, "--verbose", "-v"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), + config_file: Optional[str] = Option(None, help="The configuration file"), + yes: bool = Option(False, "--yes", prompt="Are you sure?", help="Just remove, don't ask first"), ) -> None: """Drop all tables in the destination schema. Does not drop the schema itself. 
""" - conf_logger(verbose) if yes: logger.debug("Dropping tables.") config = read_config_file(config_file) if config_file is not None else {} diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 9f42c0fa..24617bb7 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -12,7 +12,6 @@ from black import FileMode, format_str from jinja2 import Environment, FileSystemLoader, Template from mimesis.providers.base import BaseProvider -import sqlalchemy from sqlalchemy import Engine, MetaData, UniqueConstraint, text from sqlalchemy.dialects import postgresql from sqlalchemy.ext.asyncio import AsyncEngine From 2b1a9ab1bbfa99ce476745f27e3ff806ecb5bf0d Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 18 Dec 2024 12:26:45 +0000 Subject: [PATCH 10/85] Vocabulary tables can now be created even with circular dependencies. --- docs/source/introduction.rst | 2 +- sqlsynthgen/create.py | 92 ++++++++++++++++++++++++++----- sqlsynthgen/main.py | 33 ++++++----- sqlsynthgen/make.py | 46 +++++++++++----- sqlsynthgen/serialize_metadata.py | 15 +++-- sqlsynthgen/utils.py | 15 +++++ 6 files changed, 157 insertions(+), 46 deletions(-) diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst index 8b584a98..9fd3408e 100644 --- a/docs/source/introduction.rst +++ b/docs/source/introduction.rst @@ -162,7 +162,7 @@ This will export the ``countries`` table rows to a file called ``countries.yaml` We need to truncate any tables in our destination database before importing the countries data with:: $ sqlsynthgen remove-data --config-file config.yaml - $ sqlsynthgen create-vocab + $ sqlsynthgen create-vocab --config-file config.yaml --orm-file orm.yaml Since ``make-generators`` rewrote ``ssg.py``, we must now re-edit it to add the primary key ``VARCHAR`` workarounds for the ``users`` and ``age_gender_bkts`` tables, as we did in section above. Once this is done, we can generate random data for the other three tables with:: diff --git a/sqlsynthgen/create.py b/sqlsynthgen/create.py index 091e7f78..59b00ffe 100644 --- a/sqlsynthgen/create.py +++ b/sqlsynthgen/create.py @@ -2,13 +2,26 @@ from collections import Counter from typing import Any, Generator, Mapping, Sequence, Tuple -from sqlalchemy import Connection, insert -from sqlalchemy.exc import IntegrityError -from sqlalchemy.schema import CreateSchema, MetaData, Table - +from psycopg2.errors import UndefinedObject +from sqlalchemy import Connection, ForeignKeyConstraint, insert +from sqlalchemy.exc import IntegrityError, ProgrammingError +from sqlalchemy.orm import Session +from sqlalchemy.schema import ( + AddConstraint, + CreateSchema, + DropConstraint, + MetaData, + Table, +) from sqlsynthgen.base import FileUploader, TableGenerator from sqlsynthgen.settings import get_settings -from sqlsynthgen.utils import create_db_engine, get_sync_engine, logger +from sqlsynthgen.utils import ( + create_db_engine, + get_sync_engine, + get_vocabulary_table_names, + logger, + make_foreign_key_name, +) Story = Generator[Tuple[str, dict[str, Any]], dict[str, Any], None] RowCounts = Counter[str] @@ -35,8 +48,15 @@ def create_db_tables(metadata: MetaData) -> None: metadata.create_all(engine) -def create_db_vocab(vocab_dict: Mapping[str, FileUploader]) -> None: - """Load vocabulary tables from files.""" +def create_db_vocab(metadata: MetaData, meta_dict: dict[str, Any], config: Mapping) -> int: + """ + Load vocabulary tables from files. 
+ + arguments: + metadata: The schema of the database + meta_dict: The simple description of the schema from --orm-file + config: The configuration from --config-file + """ settings = get_settings() dst_dsn: str = settings.dst_dsn or "" assert dst_dsn != "", "Missing DST_DSN setting." @@ -45,13 +65,57 @@ def create_db_vocab(vocab_dict: Mapping[str, FileUploader]) -> None: create_db_engine(dst_dsn, schema_name=settings.dst_schema) ) - with dst_engine.connect() as dst_conn: - for vocab_table in vocab_dict.values(): - logger.debug("Loading vocabulary table %s", vocab_table.table.name) - try: - vocab_table.load(dst_conn) - except IntegrityError: - logger.exception("Loading the vocabulary table %s failed:", vocab_table) + tables_loaded: list[str] = [] + + vocab_tables = get_vocabulary_table_names(config) + for vocab_table_name in vocab_tables: + vocab_table = metadata.tables[vocab_table_name] + # Remove foreign key constraints from the table + for fk in vocab_table.foreign_key_constraints: + logger.debug("Dropping constraint %s from table %s", fk.name, vocab_table_name) + with Session(dst_engine) as session: + session.begin() + try: + session.execute(DropConstraint(fk)) + except IntegrityError: + session.rollback() + logger.exception("Dropping table %s key constraint %s failed:", vocab_table_name, fk.name) + except ProgrammingError as e: + session.rollback() + if type(e.orig) is UndefinedObject: + logger.debug("Constraint does not exist") + else: + raise e + # Load data into the table + try: + logger.debug("Loading vocabulary table %s", vocab_table_name) + uploader = FileUploader(table=vocab_table) + with Session(dst_engine) as session: + session.begin() + uploader.load(session.connection()) + session.commit() + tables_loaded.append(vocab_table_name) + except IntegrityError: + logger.exception("Loading the vocabulary table %s failed:", vocab_table) + # Now we add the constraints back to all the tables + for vocab_table_name in vocab_tables: + try: + for (column_name, column_dict) in meta_dict["tables"][vocab_table_name]["columns"].items(): + fk_targets = column_dict.get("foreign_keys", []) + if fk_targets: + fk = ForeignKeyConstraint( + columns=[column_name], + name=make_foreign_key_name(vocab_table_name, column_name), + refcolumns=fk_targets, + ) + with Session(dst_engine) as session: + session.begin() + vocab_table.append_constraint(fk) + session.execute(AddConstraint(fk)) + session.commit() + except IntegrityError: + logger.exception("Restoring table %s foreign keys failed:", vocab_table) + return tables_loaded def create_db_data( diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 8daa41c1..3896a7f4 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -57,14 +57,21 @@ def _require_src_db_dsn(settings: Settings) -> str: return src_dsn -def load_metadata(orm_file_name, config=None): - if config is not None and "tables" in config: - tables_config = config["tables"] - # Remove tables_config..ignore - #... 
+def load_metadata_config(orm_file_name, config=None): with open(orm_file_name) as orm_fh: meta_dict = yaml.load(orm_fh, yaml.Loader) - return dict_to_metadata(meta_dict) + tables_dict = meta_dict.get("tables", {}) + if config is not None and "tables" in config: + # Remove ignored tables + for (name, table_config) in config.get("tables", {}).items(): + if table_config.get("ignore", False): + tables_dict.pop(name, None) + return meta_dict + + +def load_metadata(orm_file_name, config=None): + meta_dict = load_metadata_config(orm_file_name, config) + return dict_to_metadata(meta_dict) @app.callback() @@ -130,10 +137,8 @@ def create_data( @app.command() def create_vocab( - ssg_file: str = Option( - SSG_FILENAME, - help="The name of the generators file. Must be in the current working directory." - ), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), + config_file: str = Option(None, help="The configuration file"), ) -> None: """Import vocabulary data into the target database. @@ -141,9 +146,11 @@ def create_vocab( $ sqlsynthgen create-vocab """ logger.debug("Loading vocab.") - ssg_module = import_file(ssg_file) - create_db_vocab(ssg_module.vocab_dict) - num_vocabs = len(ssg_module.vocab_dict) + config = read_config_file(config_file) if config_file is not None else {} + meta_dict = load_metadata_config(orm_file, config) + orm_metadata = dict_to_metadata(meta_dict) + vocabs_loaded = create_db_vocab(orm_metadata, meta_dict, config) + num_vocabs = len(vocabs_loaded) logger.debug("%s %s loaded.", num_vocabs, "table" if num_vocabs == 1 else "tables") diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 24617bb7..65a4d7af 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -24,6 +24,7 @@ create_db_engine, download_table, get_sync_engine, + get_vocabulary_table_names, logger, ) @@ -383,6 +384,16 @@ def _get_story_generators(config: Mapping) -> list[StoryGeneratorInfo]: return generators +def _get_related_table_names(table: Table) -> set[str]: + """ + Get the names of all tables for which there exist foreign keys from this table. + """ + return { + str(fk.referred_table.name) + for fk in table.foreign_key_constraints + } + + def make_vocabulary_tables( metadata: MetaData, config: Mapping, @@ -397,14 +408,11 @@ def make_vocabulary_tables( assert src_dsn != "", "Missing SRC_DSN setting." engine = get_sync_engine(create_db_engine(src_dsn, schema_name=settings.src_schema)) - tables_config = config.get("tables", {}) - for table in metadata.sorted_tables: - table_config = tables_config.get(table.name, {}) - - if table_config.get("vocabulary_table", False): - _generate_vocabulary_table( - table, engine, overwrite_files=overwrite_files - ) + vocab_names = get_vocabulary_table_names(config) + for table_name in vocab_names: + _generate_vocabulary_table( + metadata.tables[table_name], engine, overwrite_files=overwrite_files + ) def make_table_generators( # pylint: disable=too-many-locals @@ -414,7 +422,8 @@ def make_table_generators( # pylint: disable=too-many-locals config_filename: str, src_stats_filename: Optional[str], ) -> str: - """Create sqlsynthgen generator classes. + """ + Create sqlsynthgen generator classes. The orm and vocabulary YAML files must already have been generated (by make-tables and make-vocab). 
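The create_db_vocab changes above load vocabulary tables that reference one another by temporarily removing their foreign-key constraints: drop the constraints, bulk-load the rows, then put the constraints back. Below is a minimal sketch of that pattern, not the patch's exact code; the engine, table and rows arguments are hypothetical placeholders, and it uses only public SQLAlchemy 2.x APIs.

from sqlalchemy import insert
from sqlalchemy.orm import Session
from sqlalchemy.schema import AddConstraint, DropConstraint, Table


def load_ignoring_foreign_keys(engine, table: Table, rows: list[dict]) -> None:
    """Drop the table's FK constraints, insert the rows, then restore the constraints."""
    constraints = list(table.foreign_key_constraints)  # named constraints declared on the Table object
    with Session(engine) as session:
        for fk in constraints:
            session.execute(DropConstraint(fk))  # assumes each constraint currently exists in the target DB
        session.commit()
    try:
        with Session(engine) as session:
            session.execute(insert(table), rows)  # executemany-style bulk insert of row dicts
            session.commit()
    finally:
        with Session(engine) as session:
            for fk in constraints:
                session.execute(AddConstraint(fk))  # fails if the loaded rows violate the constraint
            session.commit()

The version in the patch is more forgiving than this sketch: it tolerates constraints that are already absent (ProgrammingError with UndefinedObject), logs IntegrityError per table, and rebuilds the constraints from the column descriptions in orm.yaml via make_foreign_key_name rather than from the live Table object.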
@@ -440,17 +449,28 @@ def make_table_generators( # pylint: disable=too-many-locals tables: list[TableGeneratorInfo] = [] vocabulary_tables: list[VocabularyTableGeneratorInfo] = [] + vocab_names = get_vocabulary_table_names(config) for table in metadata.sorted_tables: - table_config = tables_config.get(table.name, {}) - - if table_config.get("vocabulary_table", False): + if table.name in vocab_names: + related = _get_related_table_names(table) + related_non_vocab = related.difference(vocab_names) + if related_non_vocab: + logger.warning( + "Making table '%s' a vocabulary table requires that also the" + " related tables (%s) be also vocabulary tables.", + table.name, + related_non_vocab + ) vocabulary_tables.append( _get_generator_for_existing_vocabulary_table( table, engine ) ) else: - tables.append(_get_generator_for_table(table_config, table)) + tables.append(_get_generator_for_table( + tables_config.get(table.name, {}), + table + )) story_generators = _get_story_generators(config) diff --git a/sqlsynthgen/serialize_metadata.py b/sqlsynthgen/serialize_metadata.py index 87a40669..720120a2 100644 --- a/sqlsynthgen/serialize_metadata.py +++ b/sqlsynthgen/serialize_metadata.py @@ -2,6 +2,7 @@ from sqlalchemy import MetaData, Table, Column, Dialect, ForeignKey from sqlalchemy.dialects import oracle, postgresql from sqlalchemy.sql import sqltypes, schema +from sqlsynthgen.utils import make_foreign_key_name type table_component_t = dict[str, any] type table_t = dict[str, table_component_t] @@ -134,7 +135,7 @@ def column_to_dict(column: Column, dialect: Dialect) -> str: result["foreign_keys"] = foreign_keys return result -def dict_to_column(name, rep: dict) -> Column: +def dict_to_column(table_name, col_name, rep: dict) -> Column: type_sql = rep["type"] try: type_ = type_parser.parse(type_sql) @@ -143,14 +144,18 @@ def dict_to_column(name, rep: dict) -> Column: raise e if "foreign_keys" in rep: args = [ - ForeignKey(fk, ondelete='CASCADE') + ForeignKey( + fk, + name=make_foreign_key_name(table_name, col_name), + ondelete='CASCADE', + ) for fk in rep["foreign_keys"] ] else: args = [] return Column( *args, - name=name, + name=col_name, type_=type_, primary_key=rep.get("primary", False), nullable=rep.get("nullable", None), @@ -194,8 +199,8 @@ def dict_to_table(name: str, meta: MetaData, table_dict: table_t) -> Table: return Table( name, meta, - *[ dict_to_column(name, col) - for (name, col) in table_dict.get("columns", {}).items() + *[ dict_to_column(name, colname, col) + for (colname, col) in table_dict.get("columns", {}).items() ], *[ dict_to_unique(constraint) for constraint in table_dict.get("unique", []) diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index d63dc965..f82eff23 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -182,3 +182,18 @@ def conf_logger(verbose: bool) -> None: logger.addHandler(stdout_handler) logger.addHandler(stderr_handler) + + +def get_vocabulary_table_names(config: Mapping) -> set[str]: + """ + Extract the table names with a vocabulary_table: true property. 
+ """ + return { + table_name + for (table_name, table_config) in config.get("tables", {}).items() + if table_config.get("vocabulary_table", False) + } + + +def make_foreign_key_name(table_name: str, col_name: str) -> str: + return f"{table_name}_{col_name}_fkey" From 4e878ccea9d313cba3cbba17504750f9a2a463a5 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 18 Dec 2024 16:56:20 +0000 Subject: [PATCH 11/85] Fixed some issues --- sqlsynthgen/create.py | 52 ++++++++--------------------------- sqlsynthgen/make.py | 4 +-- sqlsynthgen/utils.py | 63 +++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 74 insertions(+), 45 deletions(-) diff --git a/sqlsynthgen/create.py b/sqlsynthgen/create.py index 59b00ffe..f81bc3fc 100644 --- a/sqlsynthgen/create.py +++ b/sqlsynthgen/create.py @@ -2,14 +2,11 @@ from collections import Counter from typing import Any, Generator, Mapping, Sequence, Tuple -from psycopg2.errors import UndefinedObject -from sqlalchemy import Connection, ForeignKeyConstraint, insert -from sqlalchemy.exc import IntegrityError, ProgrammingError +from sqlalchemy import Connection, insert +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session from sqlalchemy.schema import ( - AddConstraint, CreateSchema, - DropConstraint, MetaData, Table, ) @@ -20,7 +17,8 @@ get_sync_engine, get_vocabulary_table_names, logger, - make_foreign_key_name, + reinstate_vocab_foreign_key_constraints, + remove_vocab_foreign_key_constraints, ) Story = Generator[Tuple[str, dict[str, Any]], dict[str, Any], None] @@ -67,26 +65,10 @@ def create_db_vocab(metadata: MetaData, meta_dict: dict[str, Any], config: Mappi tables_loaded: list[str] = [] + remove_vocab_foreign_key_constraints(metadata, config, dst_engine) vocab_tables = get_vocabulary_table_names(config) for vocab_table_name in vocab_tables: vocab_table = metadata.tables[vocab_table_name] - # Remove foreign key constraints from the table - for fk in vocab_table.foreign_key_constraints: - logger.debug("Dropping constraint %s from table %s", fk.name, vocab_table_name) - with Session(dst_engine) as session: - session.begin() - try: - session.execute(DropConstraint(fk)) - except IntegrityError: - session.rollback() - logger.exception("Dropping table %s key constraint %s failed:", vocab_table_name, fk.name) - except ProgrammingError as e: - session.rollback() - if type(e.orig) is UndefinedObject: - logger.debug("Constraint does not exist") - else: - raise e - # Load data into the table try: logger.debug("Loading vocabulary table %s", vocab_table_name) uploader = FileUploader(table=vocab_table) @@ -97,24 +79,12 @@ def create_db_vocab(metadata: MetaData, meta_dict: dict[str, Any], config: Mappi tables_loaded.append(vocab_table_name) except IntegrityError: logger.exception("Loading the vocabulary table %s failed:", vocab_table) - # Now we add the constraints back to all the tables - for vocab_table_name in vocab_tables: - try: - for (column_name, column_dict) in meta_dict["tables"][vocab_table_name]["columns"].items(): - fk_targets = column_dict.get("foreign_keys", []) - if fk_targets: - fk = ForeignKeyConstraint( - columns=[column_name], - name=make_foreign_key_name(vocab_table_name, column_name), - refcolumns=fk_targets, - ) - with Session(dst_engine) as session: - session.begin() - vocab_table.append_constraint(fk) - session.execute(AddConstraint(fk)) - session.commit() - except IntegrityError: - logger.exception("Restoring table %s foreign keys failed:", vocab_table) + reinstate_vocab_foreign_key_constraints( + metadata, + meta_dict, + 
config, + dst_engine, + ) return tables_loaded diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 65a4d7af..d04185d5 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -450,8 +450,8 @@ def make_table_generators( # pylint: disable=too-many-locals tables: list[TableGeneratorInfo] = [] vocabulary_tables: list[VocabularyTableGeneratorInfo] = [] vocab_names = get_vocabulary_table_names(config) - for table in metadata.sorted_tables: - if table.name in vocab_names: + for (table_name, table) in metadata.tables.items(): + if table_name in vocab_names: related = _get_related_table_names(table) related_non_vocab = related.difference(vocab_names) if related_non_vocab: diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index f82eff23..c5d7e9fc 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -11,10 +11,24 @@ import yaml from jsonschema.exceptions import ValidationError from jsonschema.validators import validate -from sqlalchemy import Engine, create_engine, event, select +from psycopg2.errors import UndefinedObject +from sqlalchemy import ( + Engine, + create_engine, + event, + select, +) from sqlalchemy.engine.interfaces import DBAPIConnection +from sqlalchemy.exc import IntegrityError, ProgrammingError from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine -from sqlalchemy.schema import MetaData, Table +from sqlalchemy.orm import Session +from sqlalchemy.schema import ( + AddConstraint, + DropConstraint, + ForeignKeyConstraint, + MetaData, + Table, +) # Define some types used repeatedly in the code base MaybeAsyncEngine = Union[Engine, AsyncEngine] @@ -197,3 +211,48 @@ def get_vocabulary_table_names(config: Mapping) -> set[str]: def make_foreign_key_name(table_name: str, col_name: str) -> str: return f"{table_name}_{col_name}_fkey" + + +def remove_vocab_foreign_key_constraints(metadata, config, dst_engine): + vocab_tables = get_vocabulary_table_names(config) + for vocab_table_name in vocab_tables: + vocab_table = metadata.tables[vocab_table_name] + for fk in vocab_table.foreign_key_constraints: + logger.debug("Dropping constraint %s from table %s", fk.name, vocab_table_name) + with Session(dst_engine) as session: + session.begin() + try: + session.execute(DropConstraint(fk)) + session.commit() + except IntegrityError: + session.rollback() + logger.exception("Dropping table %s key constraint %s failed:", vocab_table_name, fk.name) + except ProgrammingError as e: + session.rollback() + if type(e.orig) is UndefinedObject: + logger.debug("Constraint does not exist") + else: + raise e + + +def reinstate_vocab_foreign_key_constraints(metadata, meta_dict, config, dst_engine): + vocab_tables = get_vocabulary_table_names(config) + for vocab_table_name in vocab_tables: + vocab_table = metadata.tables[vocab_table_name] + try: + for (column_name, column_dict) in meta_dict["tables"][vocab_table_name]["columns"].items(): + fk_targets = column_dict.get("foreign_keys", []) + if fk_targets: + fk = ForeignKeyConstraint( + columns=[column_name], + name=make_foreign_key_name(vocab_table_name, column_name), + refcolumns=fk_targets, + ) + logger.debug(f"Restoring foreign key constraint {fk.name}") + with Session(dst_engine) as session: + session.begin() + vocab_table.append_constraint(fk) + session.execute(AddConstraint(fk)) + session.commit() + except IntegrityError: + logger.exception("Restoring table %s foreign keys failed:", vocab_table) From a47c1ee2bf3e4e393cab646690d35b5a5502b0ac Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 18 Dec 2024 17:52:07 +0000 
Subject: [PATCH 12/85] Copy with config.yaml tables properties being empty. --- sqlsynthgen/main.py | 3 ++- sqlsynthgen/make.py | 10 ++++++---- sqlsynthgen/utils.py | 12 +++++++++++- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 3896a7f4..f2e622d1 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -23,6 +23,7 @@ from sqlsynthgen.utils import ( CONFIG_SCHEMA_PATH, conf_logger, + get_flag, import_file, logger, read_config_file, @@ -64,7 +65,7 @@ def load_metadata_config(orm_file_name, config=None): if config is not None and "tables" in config: # Remove ignored tables for (name, table_config) in config.get("tables", {}).items(): - if table_config.get("ignore", False): + if get_flag(table_config, "ignore"): tables_dict.pop(name, None) return meta_dict diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index d04185d5..0f677053 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -23,6 +23,8 @@ from sqlsynthgen.utils import ( create_db_engine, download_table, + get_property, + get_flag, get_sync_engine, get_vocabulary_table_names, logger, @@ -108,7 +110,7 @@ def _get_row_generator( ) -> tuple[list[RowGeneratorInfo], list[str]]: """Get the row generators information, for the given table.""" row_gen_info: list[RowGeneratorInfo] = [] - config: list[dict[str, Any]] = table_config.get("row_generators", {}) + config: list[dict[str, Any]] = get_property(table_config, "row_generators", {}) columns_covered = [] for gen_conf in config: name: str = gen_conf["name"] @@ -350,7 +352,7 @@ def _get_generator_for_table( table_name=table.name, class_name=table.name.title() + "Generator", columns=[str(col.name) for col in table.columns], - rows_per_pass=table_config.get("num_rows_per_pass", 1), + rows_per_pass=get_property(table_config, "num_rows_per_pass", 1), unique_constraints=unique_constraints, ) @@ -554,7 +556,7 @@ def make_tables_file( def reflect_if(table_name: str, _: Any) -> bool: table_config = tables_config.get(table_name, {}) - ignore = table_config.get("ignore", False) + ignore = get_flag(table_config, "ignore") return not ignore metadata = MetaData() @@ -566,7 +568,7 @@ def reflect_if(table_name: str, _: Any) -> bool: # for table_name in metadata.tables.keys(): # table_config = tables_config.get(table_name, {}) -# ignore = table_config.get("ignore", False) +# ignore = get_flag(table_config, "ignore") # if ignore: # logger.warning( # "Table %s is supposed to be ignored but there is a foreign key " diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index c5d7e9fc..577e5a9b 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -198,6 +198,16 @@ def conf_logger(verbose: bool) -> None: logger.addHandler(stderr_handler) +def get_flag(maybe_dict, key): + """Returns maybe_dict[key] or False if that doesn't exist""" + return type(maybe_dict) is dict and maybe_dict.get(key, False) + + +def get_property(maybe_dict, key, default): + """Returns maybe_dict[key] or default if that doesn't exist""" + return maybe_dict.get(key, default) if type(maybe_dict) is dict else default + + def get_vocabulary_table_names(config: Mapping) -> set[str]: """ Extract the table names with a vocabulary_table: true property. 
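The reason for get_flag and get_property above is that a config.yaml table entry whose body is left empty (a bare "countries:" line under tables:) parses to None rather than to an empty mapping, so calling .get() on it would raise. A small self-contained check of that behaviour follows; the table names are invented, the helper is re-stated only so the snippet runs on its own (it matches the one added to sqlsynthgen/utils.py above), and PyYAML is assumed to be installed.

import yaml


def get_flag(maybe_dict, key):
    """Same logic as the helper above: False when the key is absent or the entry is empty/None."""
    return type(maybe_dict) is dict and maybe_dict.get(key, False)


config = yaml.safe_load(
    "tables:\n"
    "  countries:\n"              # empty body in config.yaml -> parsed as None
    "  users:\n"
    "    vocabulary_table: true\n"
)
tables = config["tables"]
assert tables["countries"] is None                                # .get() on this would raise AttributeError
assert get_flag(tables["countries"], "vocabulary_table") is False
assert get_flag(tables["users"], "vocabulary_table") is True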
@@ -205,7 +215,7 @@ def get_vocabulary_table_names(config: Mapping) -> set[str]: return { table_name for (table_name, table_config) in config.get("tables", {}).items() - if table_config.get("vocabulary_table", False) + if get_flag(table_config, "vocabulary_table") } From a457c62fab88b1fe2772ebfb28b3403e3f6a6358 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 19 Dec 2024 13:08:29 +0000 Subject: [PATCH 13/85] Sort tables ourselves so we don't warn about vocab table loops --- sqlsynthgen/base.py | 18 +++---- sqlsynthgen/create.py | 3 +- sqlsynthgen/main.py | 10 ++-- sqlsynthgen/make.py | 13 +---- sqlsynthgen/utils.py | 112 +++++++++++++++++++++++++++++++++++++++--- 5 files changed, 120 insertions(+), 36 deletions(-) diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index 8f885abe..015dac74 100644 --- a/sqlsynthgen/base.py +++ b/sqlsynthgen/base.py @@ -9,7 +9,7 @@ from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.schema import Table -from sqlsynthgen.utils import logger +from sqlsynthgen.utils import logger, stream_yaml class TableGenerator(ABC): @@ -43,20 +43,14 @@ def load(self, connection: Connection) -> None: logger.warning("File %s not found. Skipping...", yaml_file) return try: - with yaml_file.open("r", newline="", encoding="utf-8") as yamlfile: - rows = yaml.load(yamlfile, Loader=yaml.Loader) + rows = stream_yaml(yaml_file) + for row in rows: + stmt = insert(self.table).values(row) + connection.execute(stmt) + connection.commit() except yaml.YAMLError as e: logger.warning("Error reading YAML file %s: %s", yaml_file, e) return - - if not rows: - logger.warning("No rows in %s. Skipping...", yaml_file) - return - - try: - stmt = insert(self.table).values(list(rows)) - connection.execute(stmt) - connection.commit() except SQLAlchemyError as e: logger.warning( "Error inserting rows into table %s: %s", self.table.fullname, e diff --git a/sqlsynthgen/create.py b/sqlsynthgen/create.py index f81bc3fc..faf4a6fc 100644 --- a/sqlsynthgen/create.py +++ b/sqlsynthgen/create.py @@ -192,8 +192,7 @@ def populate( # Generate individual rows, table by table. for table in tables: if table.name not in table_generator_dict: - # We don't have a generator for this table, probably because it's a - # vocabulary table. 
+ # We don't have a generator for this table continue table_generator = table_generator_dict[table.name] if table_generator.num_rows_per_pass == 0: diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index f2e622d1..bd47e051 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -27,6 +27,7 @@ import_file, logger, read_config_file, + sorted_non_vocabulary_tables, ) from .serialize_metadata import dict_to_metadata @@ -58,7 +59,7 @@ def _require_src_db_dsn(settings: Settings) -> str: return src_dsn -def load_metadata_config(orm_file_name, config=None): +def load_metadata_config(orm_file_name, config: dict | None=None): with open(orm_file_name) as orm_fh: meta_dict = yaml.load(orm_fh, yaml.Loader) tables_dict = meta_dict.get("tables", {}) @@ -70,7 +71,7 @@ def load_metadata_config(orm_file_name, config=None): return meta_dict -def load_metadata(orm_file_name, config=None): +def load_metadata(orm_file_name, config: dict | None=None): meta_dict = load_metadata_config(orm_file_name, config) return dict_to_metadata(meta_dict) @@ -114,12 +115,13 @@ def create_data( $ sqlsynthgen create-data """ logger.debug("Creating data.") - orm_metadata = load_metadata(orm_file, config_file) + config = read_config_file(config_file) if config_file is not None else {} + orm_metadata = load_metadata(orm_file, config) ssg_module = import_file(ssg_file) table_generator_dict = ssg_module.table_generator_dict story_generator_list = ssg_module.story_generator_list row_counts = create_db_data( - orm_metadata.sorted_tables, + sorted_non_vocabulary_tables(orm_metadata, config), table_generator_dict, story_generator_list, num_passes, diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 0f677053..9380c357 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -25,6 +25,7 @@ download_table, get_property, get_flag, + get_related_table_names, get_sync_engine, get_vocabulary_table_names, logger, @@ -386,16 +387,6 @@ def _get_story_generators(config: Mapping) -> list[StoryGeneratorInfo]: return generators -def _get_related_table_names(table: Table) -> set[str]: - """ - Get the names of all tables for which there exist foreign keys from this table. 
- """ - return { - str(fk.referred_table.name) - for fk in table.foreign_key_constraints - } - - def make_vocabulary_tables( metadata: MetaData, config: Mapping, @@ -454,7 +445,7 @@ def make_table_generators( # pylint: disable=too-many-locals vocab_names = get_vocabulary_table_names(config) for (table_name, table) in metadata.tables.items(): if table_name in vocab_names: - related = _get_related_table_names(table) + related = get_related_table_names(table) related_non_vocab = related.difference(vocab_names) if related_non_vocab: logger.warning( diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index 577e5a9b..26de527f 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -91,13 +91,13 @@ def download_table( """Download a Table and store it as a .yaml file.""" stmt = select(table) with engine.connect() as conn: - result = [ - {str(col_name): value for (col_name, value) in row.items()} - for row in conn.execute(stmt).mappings() - ] - - with Path(yaml_file_name).open("w", newline="", encoding="utf-8") as yamlfile: - yamlfile.write(yaml.dump(result)) + with Path(yaml_file_name).open("w", newline="", encoding="utf-8") as yamlfile: + for row in conn.execute(stmt).mappings(): + result = { + str(col_name): value + for (col_name, value) in row.items() + } + yamlfile.write(yaml.dump([result])) def get_sync_engine(engine: MaybeAsyncEngine) -> Engine: @@ -208,6 +208,16 @@ def get_property(maybe_dict, key, default): return maybe_dict.get(key, default) if type(maybe_dict) is dict else default +def get_related_table_names(table: Table) -> set[str]: + """ + Get the names of all tables for which there exist foreign keys from this table. + """ + return { + str(fk.referred_table.name) + for fk in table.foreign_key_constraints + } + + def get_vocabulary_table_names(config: Mapping) -> set[str]: """ Extract the table names with a vocabulary_table: true property. @@ -266,3 +276,91 @@ def reinstate_vocab_foreign_key_constraints(metadata, meta_dict, config, dst_eng session.commit() except IntegrityError: logger.exception("Restoring table %s foreign keys failed:", vocab_table) + + +def stream_yaml(yaml_path): + """ + Stream a yaml list into an iterator. + + Used instead of yaml.load(yaml_path) when the file is + known to be a list and the file might be too long to + be decoded in memory. + """ + buf = "" + with open(yaml_path, "r", encoding="utf-8") as fh: + while True: + line = fh.readline() + if not line or line.startswith("-"): + if buf: + yl = yaml.load(buf, yaml.Loader) + assert type(yl) is list and len(yl) == 1 + yield yl[0] + if not line: + return + buf = "" + buf += line + + +def topological_sort(input_nodes, get_dependencies_fn): + """ + Topoligically sort input_nodes and find any cycles. + + Returns a pair (sorted, cycles). + + 'sorted' is a list of all the elements of input_nodes sorted + so that dependencies returned by get_dependencies_fn + come after nodes that depend on them. Cycles are + arbitrarily broken for this. + + 'cycles' is a list of lists of dependency cycles. + + arguments: + input_nodes: an iterator of nodes to sort. Duplicates + are discarded. + get_dependencies_fn: a function that takes an input + node and returns a list of its dependencies. Any + dependencies not in the input_nodes list are ignored. 
+ """ + # input nodes + white = set(input_nodes) + # output nodes + black = [] + # list of cycles + cycles = [] + while white: + w = white.pop() + # stack of dependencies under consideration + grey = [w] + # nextss[i] are the dependencies of grey[i] yet to be considered + nextss = [get_dependencies_fn(w)] + while grey: + if not nextss[-1]: + black.append(grey.pop()) + nextss.pop() + else: + n = nextss[-1].pop() + if n in white: + # n is unconsidered, move it to the grey stack + white.remove(n) + grey.append(n) + nextss.append(get_dependencies_fn(n)) + elif n in grey: + # n is in a cycle + cycle_start = grey.index(n) + cycles.append(grey[cycle_start:len(grey)]) + return (black, cycles) + + +def sorted_non_vocabulary_tables(metadata: MetaData, config: Mapping) -> list[Table]: + table_names = set( + metadata.tables.keys() + ).difference( + get_vocabulary_table_names(config) + ) + (sorted, cycles) = topological_sort( + table_names, + lambda tn: get_related_table_names(metadata.tables[tn]) + ) + for cycle in cycles: + logger.warning(f"Cycle detected between tables: {cycle}") + return [ metadata.tables[tn] for tn in sorted ] From 92828c5dd21bcbe9998a066feb2f3664b30389f8 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 19 Dec 2024 17:07:48 +0000 Subject: [PATCH 14/85] added --compress to make-vocab --- sqlsynthgen/main.py | 4 +++- sqlsynthgen/make.py | 11 +++++++++-- sqlsynthgen/utils.py | 19 ++++++++++++++++--- 3 files changed, 28 insertions(+), 6 deletions(-) diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index bd47e051..172cdfe0 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -182,6 +182,7 @@ def make_vocab( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), config_file: Optional[str] = Option(None, help="The configuration file"), force: bool = Option(True, help="Overwrite any existing vocabulary file."), + compress: bool = Option(False, help="Compress file to .gz"), ) -> None: """Make files of vocabulary tables. 
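To make the ordering produced by topological_sort above concrete, here is a small self-contained check. The dependency map is invented for illustration, and the import assumes the helper lands in sqlsynthgen/utils.py as in this patch. As the loop is written, a node's dependencies are emitted before the nodes that depend on them, which is why sorted_non_vocabulary_tables gives create-data an insert-safe order (referenced tables before referencing tables); mutually dependent nodes are still returned, but also reported in cycles.

from sqlsynthgen.utils import topological_sort

# Hypothetical tables: 'a' references 'b', 'b' references 'c'; 'x' and 'y' reference each other.
deps = {
    "a": ["b"],
    "b": ["c"],
    "c": [],
    "x": ["y"],
    "y": ["x"],
}

order, cycles = topological_sort(deps, lambda name: list(deps[name]))

assert set(order) == set(deps)                                   # every table appears exactly once
assert order.index("c") < order.index("b") < order.index("a")    # referenced tables come first
assert len(cycles) == 1 and set(cycles[0]) == {"x", "y"}         # the x <-> y loop is reported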
@@ -198,7 +199,8 @@ def make_vocab( make_vocabulary_tables( orm_metadata, generator_config, - overwrite_files=force + overwrite_files=force, + compress=compress, ) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 9380c357..7eac4eb0 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -391,6 +391,7 @@ def make_vocabulary_tables( metadata: MetaData, config: Mapping, overwrite_files: bool, + compress: bool, ): """ Extracts the data from the source database for each @@ -404,7 +405,10 @@ def make_vocabulary_tables( vocab_names = get_vocabulary_table_names(config) for table_name in vocab_names: _generate_vocabulary_table( - metadata.tables[table_name], engine, overwrite_files=overwrite_files + metadata.tables[table_name], + engine, + overwrite_files=overwrite_files, + compress=compress, ) @@ -524,16 +528,19 @@ def _generate_vocabulary_table( table: Table, engine: Engine, overwrite_files: bool = False, + compress=False, ): """ Pulls data out of the source database to make a vocabulary YAML file """ yaml_file_name: str = table.fullname + ".yaml" + if compress: + yaml_file_name += ".gz" if Path(yaml_file_name).exists() and not overwrite_files: logger.debug("%s already exists; not overwriting", yaml_file_name) return logger.debug("Downloading vocabulary table %s", table.name) - download_table(table, engine, yaml_file_name) + download_table(table, engine, yaml_file_name, compress) def make_tables_file( diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index 26de527f..0e6cdc7c 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -7,6 +7,7 @@ from pathlib import Path from types import ModuleType from typing import Any, Final, Mapping, Optional, Union +import gzip import yaml from jsonschema.exceptions import ValidationError @@ -85,19 +86,31 @@ def import_file(file_path: str) -> ModuleType: return module +def open_file(file_name): + return Path(file_name).open("wb") + + +def open_compressed_file(file_name): + return gzip.GzipFile(file_name, "wb") + + def download_table( - table: Table, engine: Engine, yaml_file_name: Union[str, Path] + table: Table, + engine: Engine, + yaml_file_name: Union[str, Path], + compress: bool, ) -> None: """Download a Table and store it as a .yaml file.""" stmt = select(table) + open_fn = open_compressed_file if compress else open_file with engine.connect() as conn: - with Path(yaml_file_name).open("w", newline="", encoding="utf-8") as yamlfile: + with open_fn(yaml_file_name) as yamlfile: for row in conn.execute(stmt).mappings(): result = { str(col_name): value for (col_name, value) in row.items() } - yamlfile.write(yaml.dump([result])) + yamlfile.write(yaml.dump([result]).encode()) def get_sync_engine(engine: MaybeAsyncEngine) -> Engine: From 2f5962740a58865e4918b7dbb8960848195e40a3 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 19 Dec 2024 19:28:04 +0000 Subject: [PATCH 15/85] make-vocab progress reporting --- sqlsynthgen/utils.py | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index 0e6cdc7c..8c0158a2 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -13,7 +13,9 @@ from jsonschema.exceptions import ValidationError from jsonschema.validators import validate from psycopg2.errors import UndefinedObject +import sqlalchemy from sqlalchemy import ( + Connection, Engine, create_engine, event, @@ -34,6 +36,9 @@ # Define some types used repeatedly in the code base MaybeAsyncEngine = Union[Engine, AsyncEngine] +# After every how many rows 
of vocab table downloading do we see a +# progres update +MAKE_VOCAB_PROGRESS_REPORT_EVERY = 10000 CONFIG_SCHEMA_PATH: Final[Path] = ( Path(__file__).parent / "json_schemas/config_schema.json" @@ -94,6 +99,15 @@ def open_compressed_file(file_name): return gzip.GzipFile(file_name, "wb") +def table_row_count(table: Table, conn: Connection) -> int: + return conn.execute( + select(sqlalchemy.func.count()).select_from(sqlalchemy.table( + table.name, + *[sqlalchemy.column(col.name) for col in table.primary_key.columns.values()], + )) + ).scalar_one() + + def download_table( table: Table, engine: Engine, @@ -101,16 +115,26 @@ def download_table( compress: bool, ) -> None: """Download a Table and store it as a .yaml file.""" - stmt = select(table) open_fn = open_compressed_file if compress else open_file with engine.connect() as conn: with open_fn(yaml_file_name) as yamlfile: + stmt = select(table) + rowcount = table_row_count(table, conn) + count = 0 for row in conn.execute(stmt).mappings(): result = { str(col_name): value for (col_name, value) in row.items() } yamlfile.write(yaml.dump([result]).encode()) + count += 1 + if count % MAKE_VOCAB_PROGRESS_REPORT_EVERY == 0: + logger.info( + "written row %d of %d, %.1f%%", + count, + rowcount, + 100*count/rowcount, + ) def get_sync_engine(engine: MaybeAsyncEngine) -> Engine: From ff7e3e08817e6e12be1b11ee9868685b3a727bcc Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 20 Dec 2024 13:38:25 +0000 Subject: [PATCH 16/85] remove-vocab and remove-data doesn't use ssg.py. Progress for create-vocab. --- sqlsynthgen/base.py | 28 ++++++++++++++++++++++------ sqlsynthgen/main.py | 10 ++-------- sqlsynthgen/remove.py | 27 ++++++++++++--------------- sqlsynthgen/utils.py | 25 ++++++++++++------------- 4 files changed, 48 insertions(+), 42 deletions(-) diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index 015dac74..b5297cde 100644 --- a/sqlsynthgen/base.py +++ b/sqlsynthgen/base.py @@ -1,6 +1,7 @@ """Base table generator classes.""" from abc import ABC, abstractmethod from dataclasses import dataclass +import os from pathlib import Path from typing import Any @@ -9,7 +10,11 @@ from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.schema import Table -from sqlsynthgen.utils import logger, stream_yaml +from sqlsynthgen.utils import ( + logger, + stream_yaml, + MAKE_VOCAB_PROGRESS_REPORT_EVERY, +) class TableGenerator(ABC): @@ -43,11 +48,22 @@ def load(self, connection: Connection) -> None: logger.warning("File %s not found. 
Skipping...", yaml_file) return try: - rows = stream_yaml(yaml_file) - for row in rows: - stmt = insert(self.table).values(row) - connection.execute(stmt) - connection.commit() + file_size = os.path.getsize(yaml_file) + count = 0 + with open(yaml_file, "r", encoding="utf-8") as fh: + rows = stream_yaml(fh) + for row in rows: + stmt = insert(self.table).values(row) + connection.execute(stmt) + connection.commit() + count += 1 + if count % MAKE_VOCAB_PROGRESS_REPORT_EVERY == 0: + logger.info( + "inserted row %d of %s, %.1f%%", + count, + self.table.name, + 100 * fh.tell() / file_size, + ) except yaml.YAMLError as e: logger.warning("Error reading YAML file %s: %s", yaml_file, e) return diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 172cdfe0..e2a56f57 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -330,8 +330,7 @@ def remove_data( logger.debug("Truncating non-vocabulary tables.") config = read_config_file(config_file) if config_file is not None else {} metadata = load_metadata(orm_file, config) - ssg_module = import_file(ssg_file) - remove_db_data(metadata, ssg_module, config) + remove_db_data(metadata, config) logger.debug("Non-vocabulary tables truncated.") else: logger.info("Would truncate non-vocabulary tables if called with --yes.") @@ -340,10 +339,6 @@ def remove_data( @app.command() def remove_vocab( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), - ssg_file: str = Option( - SSG_FILENAME, - help="The name of the generators file. Must be in the current working directory." - ), config_file: Optional[str] = Option(None, help="The configuration file"), yes: bool = Option(False, "--yes", prompt="Are you sure?", help="Just remove, don't ask first"), ) -> None: @@ -352,8 +347,7 @@ def remove_vocab( logger.debug("Truncating vocabulary tables.") config = read_config_file(config_file) if config_file is not None else {} metadata = load_metadata(orm_file, config) - ssg_module = import_file(ssg_file) - remove_db_vocab(metadata, ssg_module) + remove_db_vocab(metadata, config) logger.debug("Vocabulary tables truncated.") else: logger.info("Would truncate vocabulary tables if called with --yes.") diff --git a/sqlsynthgen/remove.py b/sqlsynthgen/remove.py index 3994c263..cdab8c70 100644 --- a/sqlsynthgen/remove.py +++ b/sqlsynthgen/remove.py @@ -8,31 +8,30 @@ from sqlsynthgen.utils import ( create_db_engine, get_sync_engine, + get_vocabulary_table_names, logger, + sorted_non_vocabulary_tables, ) def remove_db_data( - metadata: MetaData, ssg_module: ModuleType, config: Mapping[str, Any] + metadata: MetaData, config: Mapping[str, Any] ) -> None: """Truncate the synthetic data tables but not the vocabularies.""" settings = get_settings() assert settings.dst_dsn, "Missing destination database settings" - tables_config = config.get("tables", {}) dst_engine = get_sync_engine( create_db_engine(settings.dst_dsn, schema_name=settings.dst_schema) ) with dst_engine.connect() as dst_conn: - for table in reversed(metadata.sorted_tables): - # We presume that all tables that aren't vocab should be truncated - if table.name not in ssg_module.vocab_dict: - logger.debug('Truncating table "%s".', table.name) - dst_conn.execute(delete(table)) - dst_conn.commit() + for table in reversed(sorted_non_vocabulary_tables(metadata, config)): + logger.debug('Truncating table "%s".', table.name) + dst_conn.execute(delete(table)) + dst_conn.commit() -def remove_db_vocab(metadata: MetaData, ssg_module: ModuleType) -> None: +def remove_db_vocab(metadata: MetaData, config: 
Mapping[str, Any]) -> None: """Truncate the vocabulary tables.""" settings = get_settings() assert settings.dst_dsn, "Missing destination database settings" @@ -41,12 +40,10 @@ def remove_db_vocab(metadata: MetaData, ssg_module: ModuleType) -> None: ) with dst_engine.connect() as dst_conn: - for table in reversed(metadata.sorted_tables): - # We presume that all tables that are vocab should be truncated - if table.name in ssg_module.vocab_dict: - logger.debug('Truncating vocabulary table "%s".', table.name) - dst_conn.execute(delete(table)) - dst_conn.commit() + for table in get_vocabulary_table_names(config): + logger.debug('Truncating vocabulary table "%s".', table) + dst_conn.execute(delete(metadata.tables[table])) + dst_conn.commit() def remove_db_tables(metadata: MetaData) -> None: diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index 8c0158a2..c3275d07 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -315,7 +315,7 @@ def reinstate_vocab_foreign_key_constraints(metadata, meta_dict, config, dst_eng logger.exception("Restoring table %s foreign keys failed:", vocab_table) -def stream_yaml(yaml_path): +def stream_yaml(yaml_file_handle): """ Stream a yaml list into an iterator. @@ -324,18 +324,17 @@ def stream_yaml(yaml_path): be decoded in memory. """ buf = "" - with open(yaml_path, "r", encoding="utf-8") as fh: - while True: - line = fh.readline() - if not line or line.startswith("-"): - if buf: - yl = yaml.load(buf, yaml.Loader) - assert type(yl) is list and len(yl) == 1 - yield yl[0] - if not line: - return - buf = "" - buf += line + while True: + line = yaml_file_handle.readline() + if not line or line.startswith("-"): + if buf: + yl = yaml.load(buf, yaml.Loader) + assert type(yl) is list and len(yl) == 1 + yield yl[0] + if not line: + return + buf = "" + buf += line def topological_sort(input_nodes, get_dependencies_fn): From 405409cdf225edea5a2d4b58d657c911090e144e Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 8 Jan 2025 12:33:07 +0000 Subject: [PATCH 17/85] Initial attempt for generic column generator analysis --- sqlsynthgen/main.py | 4 +- sqlsynthgen/make.py | 235 +++++++++++++++++++++++++++++++++++++++----- 2 files changed, 215 insertions(+), 24 deletions(-) diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index e2a56f57..9314f46e 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -243,6 +243,7 @@ def make_generators( @app.command() def make_stats( + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), config_file: Optional[str] = Option(None, help="The configuration file"), stats_file: str = Option(STATS_FILENAME), force: bool = Option(False, help="Overwrite any existing vocabulary file."), @@ -261,12 +262,13 @@ def make_stats( _check_file_non_existence(stats_file_path) config = read_config_file(config_file) if config_file is not None else {} + orm_metadata = load_metadata(orm_file, config) settings = get_settings() src_dsn: str = _require_src_db_dsn(settings) src_stats = asyncio.get_event_loop().run_until_complete( - make_src_stats(src_dsn, config, settings.src_schema) + make_src_stats(src_dsn, config, orm_metadata, settings.src_schema) ) stats_file_path.write_text(yaml.dump(src_stats), encoding="utf-8") logger.debug("%s created.", stats_file) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 7eac4eb0..4e891ba7 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -3,6 +3,7 @@ import inspect import sys from dataclasses import dataclass, field +import math from pathlib import Path from typing 
import Any, Callable, Final, Mapping, Optional, Sequence, Tuple import yaml @@ -228,21 +229,65 @@ def _integer_generator(column: Column) -> tuple[str, dict[str, str]]: "accumulator": f'"{column.table.fullname}.{column.name}"' }) -_COLUMN_TYPE_TO_GENERATOR = { - sqltypes.Integer: "generic.numeric.integer_number", - sqltypes.Boolean: "generic.development.boolean", - sqltypes.Date: "generic.datetime.date", - sqltypes.DateTime: "generic.datetime.datetime", - sqltypes.Integer: _integer_generator, # must be before Numeric - sqltypes.Numeric: _numeric_generator, - sqltypes.LargeBinary: "generic.bytes_provider.bytes", - sqltypes.Uuid: "generic.cryptographic.uuid", - postgresql.UUID: "generic.cryptographic.uuid", - sqltypes.String: _string_generator, + +_YEAR_SUMMARY_QUERY = ( + "SELECT MIN(y) AS start, MAX(y) AS end FROM " + "(SELECT EXTRACT(YEAR FROM {column}) AS y FROM {table})" +) + + +@dataclass +class GeneratorInfo: + # Name or function to generate random objects of this type (not using summary data) + generator: str | Callable[[Column], str] + # SQL query that gets the data to supply as arguments to the generator + # ({column} and {table} will be interpolated) + summary_query: str | None = None + # True if we should see if we can treat this column as a choice from a finite set + numeric: bool = False + # True if we should see if we can treat this column as an amount with a distribution + choice: bool = False + +_COLUMN_TYPE_TO_GENERATOR_INFO = { + sqltypes.Boolean: GeneratorInfo( + generator="generic.development.boolean", + choice=True, + ), + sqltypes.Date: GeneratorInfo( + generator="generic.datetime.date", + summary_query=_YEAR_SUMMARY_QUERY, + ), + sqltypes.DateTime: GeneratorInfo( + generator="generic.datetime.datetime", + summary_query=_YEAR_SUMMARY_QUERY, + ), + sqltypes.Integer: GeneratorInfo( # must be before Numeric + generator=_integer_generator, + numeric=True, + choice=True, + ), + sqltypes.Numeric: GeneratorInfo( + generator=_numeric_generator, + numeric=True, + choice=True, + ), + sqltypes.LargeBinary: GeneratorInfo( + generator="generic.bytes_provider.bytes", + ), + sqltypes.Uuid: GeneratorInfo( + generator="generic.cryptographic.uuid", + ), + postgresql.UUID: GeneratorInfo( + generator="generic.cryptographic.uuid", + ), + sqltypes.String: GeneratorInfo( + generator=_string_generator, + choice=True, + ) } -def _get_generator_for_column(column_t: type) -> str | Callable[ - [type_api.TypeEngine], tuple[str, dict[str, str]]]: + +def _get_info_for_column_type(column_t: type) -> GeneratorInfo | None: """ Gets a generator from a column type. @@ -250,17 +295,30 @@ def _get_generator_for_column(column_t: type) -> str | Callable[ given the column.type will return a tuple (string representing generator callable, dict of keyword arguments to pass to the callable). """ - if column_t in _COLUMN_TYPE_TO_GENERATOR: - return _COLUMN_TYPE_TO_GENERATOR[column_t] + if column_t in _COLUMN_TYPE_TO_GENERATOR_INFO: + return _COLUMN_TYPE_TO_GENERATOR_INFO[column_t] # Search exhaustively for a superclass to the columns actual type - for key, value in _COLUMN_TYPE_TO_GENERATOR.items(): + for key, value in _COLUMN_TYPE_TO_GENERATOR_INFO.items(): if issubclass(column_t, key): return value return None +def _get_generator_for_column(column_t: type) -> str | Callable[ + [type_api.TypeEngine], tuple[str, dict[str, str]]]: + """ + Gets a generator from a column type. 
+ + Returns either a string representing the callable, or a callable that, + given the column.type will return a tuple (string representing generator + callable, dict of keyword arguments to pass to the callable). + """ + info = _get_info_for_column_type(column_t) + return None if info is None else info.generator + + def _get_generator_and_arguments(column: Column) -> tuple[str, dict[str, str]]: """ Gets the generator and its arguments from the column type, returning @@ -579,8 +637,42 @@ def reflect_if(table_name: str, _: Any) -> bool: return yaml.dump(meta_dict) +def zipf_distribution(total, bins): + basic_dist = list(map(lambda n: 1/n, range(1, bins + 1))) + bd_remaining = sum(basic_dist) + for b in basic_dist: + # yield b/bd_remaining of the `total` remaining + if bd_remaining == 0: + yield 0 + else: + x = math.floor(0.5 + total * b / bd_remaining) + bd_remaining -= x * bd_remaining / total + total -= x + yield x + + +def uniform_distribution(total, bins): + p = total // bins + n = total % bins + for i in range(0, n): + yield p + 1 + for i in range(n, bins): + yield p + + +def fit_error(test, actual): + return sum(map(lambda t, a: (t - a)*(t - a), test, actual)) + + +_CDF_BUCKETS = { + "normal": [0.0227, 0.0441, 0.0918, 0.1499, 0.1915, 0.1915, 0.1499, 0.0918, 0.0441, 0.0227], + # Uniform between -1 and 1, pdf(x) = 0.5 + "uniform": [0, 0, 0.0918, 0.204, 0.204, 0.204, 0.204, 0.0918, 0, 0], +} + + async def make_src_stats( - dsn: str, config: Mapping, schema_name: Optional[str] = None + dsn: str, config: Mapping, metadata: MetaData, schema_name: Optional[str] = None ) -> dict[str, list[dict]]: """Run the src-stats queries specified by the configuration. @@ -598,16 +690,19 @@ async def make_src_stats( use_asyncio = config.get("use-asyncio", False) engine = create_db_engine(dsn, schema_name=schema_name, use_asyncio=use_asyncio) - async def execute_query(query_block: Mapping[str, Any]) -> Any: - """Execute query in query_block.""" - logger.debug("Executing query %s", query_block["name"]) - query = text(query_block["query"]) + async def execute_raw_query(query: str): if isinstance(engine, AsyncEngine): async with engine.connect() as conn: - raw_result = await conn.execute(query) + return await conn.execute(query) else: with engine.connect() as conn: - raw_result = conn.execute(query) + return conn.execute(query) + + async def execute_query(query_block: Mapping[str, Any]) -> Any: + """Execute query in query_block.""" + logger.debug("Executing query %s", query_block["name"]) + query = text(query_block["query"]) + raw_result = execute_raw_query(query) if "dp-query" in query_block: result_df = pd.DataFrame(raw_result.mappings()) @@ -640,4 +735,98 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: for name, result in src_stats.items(): if not result: logger.warning("src-stats query %s returned no results", name) + + generic = {} + tables_config = config.get("tables", {}) + for table_name, table in metadata.tables.items(): + table_config = tables_config.get(table_name, None) + vocab_columns = set() if table_config is None else set(table_config.get("vocabulary_columns", [])) + for column_name, column in table.columns.items(): + is_vocab = column_name in vocab_columns + info = _get_info_for_column_type(type(column.type)) + if info is not None: + best_generic_generator = None + if info.numeric: + # Find summary information; mean, standard deviation and buckets 1/2 standard deviation width around mean. 
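# A minimal sketch of the bucket-fit idea used here (the observed proportions
# are made up for illustration): each candidate shape in _CDF_BUCKETS is
# compared against the observed per-bucket proportions with fit_error, and the
# name with the smallest squared error wins.
#
#   observed = [0.02, 0.05, 0.09, 0.15, 0.19, 0.19, 0.15, 0.09, 0.05, 0.02]
#   fit_error(_CDF_BUCKETS["normal"], observed)   # ~1e-4, the best fit
#   fit_error(_CDF_BUCKETS["uniform"], observed)  # ~1.2e-2, a worse fit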
+ results = await execute_raw_query(text( + "SELECT AVG({column}) AS mean, STDDEV({column}) AS sd, COUNT({column}) AS count FROM {table}".format( + column=column_name, table=table_name + ) + )) + result = results.first() + count = result.count + if result.sd is not None and 0 < result.sd: + raw_buckets = await execute_raw_query(text( + "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) as b from {table} group by b".format( + column=column_name, table=table_name, x=result.mean - 2 * result.sd, w = result.sd / 2 + ) + )) + buckets = [0] * 10 + for rb in raw_buckets: + bucket = min(9, max(0, int(rb.b) + 1)) + buckets[bucket] += rb.f / count + best_fit = None + best_fit_distribution = None + for dist_name, dist_buckets in _CDF_BUCKETS.items(): + fit = fit_error(dist_buckets, buckets) + if best_fit is None or fit < best_fit: + best_fit = fit + best_fit_distribution = dist_name + best_generic_generator = { + "name": best_fit_distribution, + "fit": best_fit, + "mean": result.mean, + "sd": result.sd, + } + if info.choice: + # Find information on how many of each example there is + results = await execute_raw_query(text( + "SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( + column=column_name, table=table_name + ) + )) + values = [] + counts = [] + total = 0 + for result in results: + c = result.f + if c != 0: + total += c + counts.append(c) + values.append(result.v) + if counts: + # Which distribution fits best? + zipf = zipf_distribution(total, len(counts)) + zipf_fit = fit_error(zipf, counts) + unif = uniform_distribution(total, len(counts)) + unif_fit = fit_error(unif, counts) + if best_generic_generator is None or zipf_fit < best_generic_generator["fit"]: + best_generic_generator = { + "name": "zipf", + "fit": zipf_fit, + "bucket_count": len(counts), + } + if is_vocab: + best_generic_generator["buckets"] = values + if best_generic_generator is None or unif_fit < best_generic_generator["fit"]: + best_generic_generator = { + "name": "uniform_choice", + "fit": unif_fit, + "bucket_count": len(counts), + } + if is_vocab: + best_generic_generator["buckets"] = values + if info.summary_query is not None: + results = await execute_raw_query(text(info.summary_query.format( + column=column_name, table=table_name + ))) + best_generic_generator = { "name": info.generator } + for k, v in results.mappings().first().items(): + best_generic_generator[k] = v + if best_generic_generator is not None: + if table_name not in generic: + generic[str(table_name)] = {} + generic[str(table_name)][str(column_name)] = best_generic_generator + if generic: + src_stats["_sqlsynthgen_generic"] = generic return src_stats From dc180eb913d2f6f706295fc0fa2ce97db5e51485 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 8 Jan 2025 18:40:49 +0000 Subject: [PATCH 18/85] fit fix and tidy up --- sqlsynthgen/make.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 4e891ba7..6326fa5c 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -1,5 +1,6 @@ """Functions to make a module of generator classes.""" import asyncio +import decimal import inspect import sys from dataclasses import dataclass, field @@ -775,8 +776,8 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: best_generic_generator = { "name": best_fit_distribution, "fit": best_fit, - "mean": result.mean, - "sd": result.sd, + "mean": float(result.mean), + "sd": float(result.sd), } if info.choice: # Find information on how 
many of each example there is @@ -793,13 +794,17 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: if c != 0: total += c counts.append(c) - values.append(result.v) + v = result.v + if type(v) is decimal.Decimal: + v = float(v) + values.append(v) if counts: + total2 = total * total # Which distribution fits best? zipf = zipf_distribution(total, len(counts)) - zipf_fit = fit_error(zipf, counts) + zipf_fit = fit_error(zipf, counts) / total2 unif = uniform_distribution(total, len(counts)) - unif_fit = fit_error(unif, counts) + unif_fit = fit_error(unif, counts) / total2 if best_generic_generator is None or zipf_fit < best_generic_generator["fit"]: best_generic_generator = { "name": "zipf", @@ -822,7 +827,7 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: ))) best_generic_generator = { "name": info.generator } for k, v in results.mappings().first().items(): - best_generic_generator[k] = v + best_generic_generator[k] = float(v) if best_generic_generator is not None: if table_name not in generic: generic[str(table_name)] = {} From 2654db8a528bdb9aa6bae5d920169fbc2381c373 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 9 Jan 2025 11:45:11 +0000 Subject: [PATCH 19/85] small fixes --- sqlsynthgen/main.py | 4 ---- sqlsynthgen/make.py | 15 ++++++++------- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 9314f46e..5d8aef1d 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -320,10 +320,6 @@ def validate_config( @app.command() def remove_data( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), - ssg_file: str = Option( - SSG_FILENAME, - help="The name of the generators file. Must be in the current working directory." - ), config_file: Optional[str] = Option(None, help="The configuration file"), yes: bool = Option(False, "--yes", prompt="Are you sure?", help="Just remove, don't ask first"), ) -> None: diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 6326fa5c..11749b70 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -667,8 +667,9 @@ def fit_error(test, actual): _CDF_BUCKETS = { "normal": [0.0227, 0.0441, 0.0918, 0.1499, 0.1915, 0.1915, 0.1499, 0.0918, 0.0441, 0.0227], - # Uniform between -1 and 1, pdf(x) = 0.5 - "uniform": [0, 0, 0.0918, 0.204, 0.204, 0.204, 0.204, 0.0918, 0, 0], + # Uniform wih mean 0 and sigma 1 runs between +/-sqrt(3) = +/-1.732 + # and has height 1 / 2sqrt(3) = 0.28868. + "uniform": [0, 0.06698, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.06698, 0], } @@ -745,7 +746,7 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: for column_name, column in table.columns.items(): is_vocab = column_name in vocab_columns info = _get_info_for_column_type(type(column.type)) - if info is not None: + if not column.foreign_keys and info is not None: best_generic_generator = None if info.numeric: # Find summary information; mean, standard deviation and buckets 1/2 standard deviation width around mean. 
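# Quick check of the corrected "uniform" reference buckets above, assuming the
# same setup as the comment (mean 0, standard deviation 1, half-sigma buckets):
import math

density = 1 / (2 * math.sqrt(3))               # ~0.28868; support is [-sqrt(3), sqrt(3)]
inner_bucket = 0.5 * density                   # ~0.14434 for each half-sigma-wide bucket
outer_bucket = (math.sqrt(3) - 1.5) * density  # ~0.067, the 0.06698 entries
# Buckets beyond +/-2 stay at 0 because the support already ends at ~1.732.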
@@ -809,18 +810,18 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: best_generic_generator = { "name": "zipf", "fit": zipf_fit, - "bucket_count": len(counts), + "value_count": len(counts), } if is_vocab: - best_generic_generator["buckets"] = values + best_generic_generator["values"] = values if best_generic_generator is None or unif_fit < best_generic_generator["fit"]: best_generic_generator = { "name": "uniform_choice", "fit": unif_fit, - "bucket_count": len(counts), + "value_count": len(counts), } if is_vocab: - best_generic_generator["buckets"] = values + best_generic_generator["values"] = values if info.summary_query is not None: results = await execute_raw_query(text(info.summary_query.format( column=column_name, table=table_name From ef3e690a8a5a97466f018b362cce051084f02e5b Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 14 Jan 2025 19:25:54 +0000 Subject: [PATCH 20/85] "obvious" generators --- sqlsynthgen/base.py | 25 ++++++++ sqlsynthgen/main.py | 5 +- sqlsynthgen/make.py | 110 ++++++++++++++++++++------------ sqlsynthgen/remove.py | 7 +- sqlsynthgen/templates/ssg.py.j2 | 6 +- 5 files changed, 105 insertions(+), 48 deletions(-) diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index b5297cde..49682262 100644 --- a/sqlsynthgen/base.py +++ b/sqlsynthgen/base.py @@ -1,6 +1,7 @@ """Base table generator classes.""" from abc import ABC, abstractmethod from dataclasses import dataclass +import numpy import os from pathlib import Path from typing import Any @@ -16,6 +17,30 @@ MAKE_VOCAB_PROGRESS_REPORT_EVERY, ) +def zipf_weights(size): + total = sum(map(lambda n: 1/n, range(1, size + 1))) + return [ + 1 / (n * total) + for n in range(1, size + 1) + ] + + +class DistributionGenerator: + def __init__(self): + self.rng = numpy.random.default_rng() + + def uniform(self, low: float, high: float) -> float: + return self.rng.uniform(low=low, high=high) + + def normal(self, mean: float, sd: float) -> float: + return self.rng.normal(loc=mean, scale=sd) + + def choice(self, a): + return self.rng.choice(a).item() + + def zipf_choice(self, a, n): + return self.rng.choice(a, p = zipf_weights(n)).item() + class TableGenerator(ABC): """Abstract base class for table generator classes.""" diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 5d8aef1d..4c0e43d4 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -344,8 +344,9 @@ def remove_vocab( if yes: logger.debug("Truncating vocabulary tables.") config = read_config_file(config_file) if config_file is not None else {} - metadata = load_metadata(orm_file, config) - remove_db_vocab(metadata, config) + meta_dict = load_metadata_config(orm_file, config) + orm_metadata = dict_to_metadata(meta_dict) + remove_db_vocab(orm_metadata, meta_dict, config) logger.debug("Vocabulary tables truncated.") else: logger.info("Would truncate vocabulary tables if called with --yes.") diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 11749b70..5e1985fd 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -233,7 +233,7 @@ def _integer_generator(column: Column) -> tuple[str, dict[str, str]]: _YEAR_SUMMARY_QUERY = ( "SELECT MIN(y) AS start, MAX(y) AS end FROM " - "(SELECT EXTRACT(YEAR FROM {column}) AS y FROM {table})" + "(SELECT EXTRACT(YEAR FROM {column}) AS y FROM {table}) AS years" ) @@ -244,6 +244,10 @@ class GeneratorInfo: # SQL query that gets the data to supply as arguments to the generator # ({column} and {table} will be interpolated) summary_query: str | None = None + # Dictionary of the names returned from 
the summary_query to arg types. + # An arg type is a callable turning the returned value into a Python type to + # pass as an argument to the generator. + arg_types: dict[str, Callable] = field(default_factory=dict) # True if we should see if we can treat this column as a choice from a finite set numeric: bool = False # True if we should see if we can treat this column as an amount with a distribution @@ -257,10 +261,12 @@ class GeneratorInfo: sqltypes.Date: GeneratorInfo( generator="generic.datetime.date", summary_query=_YEAR_SUMMARY_QUERY, + arg_types={ "start": int, "end": int } ), sqltypes.DateTime: GeneratorInfo( generator="generic.datetime.datetime", summary_query=_YEAR_SUMMARY_QUERY, + arg_types={ "start": int, "end": int } ), sqltypes.Integer: GeneratorInfo( # must be before Numeric generator=_integer_generator, @@ -388,7 +394,9 @@ def __init__(self, *columns: Column, name: str): def _get_generator_for_table( - table_config: Mapping[str, Any], table: Table + table_config: Mapping[str, Any], + table: Table, + src_stats: Mapping[str, Any]=None ) -> TableGeneratorInfo: """Get generator information for the given table.""" unique_constraints = sorted( @@ -419,10 +427,22 @@ def _get_generator_for_table( row_gen_info_data, columns_covered = _get_row_generator(table_config) table_data.row_gens.extend(row_gen_info_data) + generic_generators = get_property(src_stats, "_sqlsynthgen_generic", {}).get(table.name, {}) for column in table.columns: if column.name not in columns_covered: # No generator for this column in the user config. - table_data.row_gens.append(_get_default_generator(column)) + # Perhaps there is something for us in src-stats.yaml's + # _sqlsynthgen_generic? + if column.name in generic_generators: + gen = generic_generators[column.name] + table_data.row_gens.append( + RowGeneratorInfo([column.name], FunctionCall( + gen["name"], + [f"{k}={v}" for (k, v) in gen.get("kwargs", {}).items()] + )) + ) + else: + table_data.row_gens.append(_get_default_generator(column)) return table_data @@ -503,6 +523,10 @@ def make_table_generators( # pylint: disable=too-many-locals tables_config = config.get("tables", {}) engine = get_sync_engine(create_db_engine(src_dsn, schema_name=settings.src_schema)) + src_stats = {} + with open(src_stats_filename, "r", encoding="utf-8") as f: + src_stats = yaml.unsafe_load(f) + tables: list[TableGeneratorInfo] = [] vocabulary_tables: list[VocabularyTableGeneratorInfo] = [] vocab_names = get_vocabulary_table_names(config) @@ -525,7 +549,8 @@ def make_table_generators( # pylint: disable=too-many-locals else: tables.append(_get_generator_for_table( tables_config.get(table.name, {}), - table + table, + src_stats, )) story_generators = _get_story_generators(config) @@ -568,14 +593,6 @@ def _get_generator_for_existing_vocabulary_table( """ Turns an existing vocabulary YAML file into a VocabularyTableGeneratorInfo. 
""" - yaml_file_name: str = table_file_name or table.fullname + ".yaml" - if not Path(yaml_file_name).exists(): - logger.error("%s has not already been generated, please run make-vocab first", yaml_file_name) - sys.exit(1) - logger.debug("Downloading vocabulary table %s", table.name) - download_table(table, engine, yaml_file_name) - logger.debug("Done downloading %s", table.name) - return VocabularyTableGeneratorInfo( dictionary_entry=table.name, variable_name=f"{table.name.lower()}_vocab", @@ -623,17 +640,17 @@ def reflect_if(table_name: str, _: Any) -> bool: ) meta_dict = metadata_to_dict(metadata, db_dsn, engine.dialect) -# for table_name in metadata.tables.keys(): -# table_config = tables_config.get(table_name, {}) -# ignore = get_flag(table_config, "ignore") -# if ignore: -# logger.warning( -# "Table %s is supposed to be ignored but there is a foreign key " -# "reference to it. " -# "You may need to create this table manually at the dst schema before " -# "running create-tables.", -# table_name, -# ) + for table_name in metadata.tables.keys(): + table_config = tables_config.get(table_name, {}) + ignore = get_flag(table_config, "ignore") + if ignore: + logger.warning( + "Table %s is supposed to be ignored but there is a foreign key " + "reference to it. " + "You may need to create this table manually at the dst schema before " + "running create-tables.", + table_name, + ) return yaml.dump(meta_dict) @@ -666,10 +683,16 @@ def fit_error(test, actual): _CDF_BUCKETS = { - "normal": [0.0227, 0.0441, 0.0918, 0.1499, 0.1915, 0.1915, 0.1499, 0.0918, 0.0441, 0.0227], + "dist_gen.normal": { + "buckets": [0.0227, 0.0441, 0.0918, 0.1499, 0.1915, 0.1915, 0.1499, 0.0918, 0.0441, 0.0227], + "kwarg_fn": lambda mean, sd: {"mean": mean, "sd": sd}, + }, # Uniform wih mean 0 and sigma 1 runs between +/-sqrt(3) = +/-1.732 # and has height 1 / 2sqrt(3) = 0.28868. - "uniform": [0, 0.06698, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.06698, 0], + "dist_gen.uniform": { + "buckets": [0, 0.06698, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.06698, 0], + "kwarg_fn": lambda mean, sd: {"low": mean - sd * math.sqrt(3), "high": mean + sd * math.sqrt(3)}, + }, } @@ -746,8 +769,8 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: for column_name, column in table.columns.items(): is_vocab = column_name in vocab_columns info = _get_info_for_column_type(type(column.type)) - if not column.foreign_keys and info is not None: - best_generic_generator = None + best_generic_generator = None + if not column.foreign_keys and not column.primary_key and info is not None: if info.numeric: # Find summary information; mean, standard deviation and buckets 1/2 standard deviation width around mean. 
results = await execute_raw_query(text( @@ -769,18 +792,19 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: buckets[bucket] += rb.f / count best_fit = None best_fit_distribution = None - for dist_name, dist_buckets in _CDF_BUCKETS.items(): - fit = fit_error(dist_buckets, buckets) + best_fit_info = None + for dist_name, dist_info in _CDF_BUCKETS.items(): + fit = fit_error(dist_info["buckets"], buckets) if best_fit is None or fit < best_fit: best_fit = fit best_fit_distribution = dist_name + best_fit_info = dist_info best_generic_generator = { "name": best_fit_distribution, "fit": best_fit, - "mean": float(result.mean), - "sd": float(result.sd), + "kwargs": best_fit_info["kwarg_fn"](float(result.mean), float(result.sd)), } - if info.choice: + if info.choice and is_vocab: # For now let's not try to generate choices of unknowable stuff, just generate unknowable stuff. # Find information on how many of each example there is results = await execute_raw_query(text( "SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( @@ -808,27 +832,29 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: unif_fit = fit_error(unif, counts) / total2 if best_generic_generator is None or zipf_fit < best_generic_generator["fit"]: best_generic_generator = { - "name": "zipf", + "name": "dist_gen.zipf_choice", "fit": zipf_fit, - "value_count": len(counts), + "kwargs": { + "a": values, + "n": f"{len(counts)}", + } } - if is_vocab: - best_generic_generator["values"] = values if best_generic_generator is None or unif_fit < best_generic_generator["fit"]: best_generic_generator = { - "name": "uniform_choice", + "name": "dist_gen.choice", "fit": unif_fit, - "value_count": len(counts), + "kwargs": { + "a": values, + } } - if is_vocab: - best_generic_generator["values"] = values if info.summary_query is not None: results = await execute_raw_query(text(info.summary_query.format( column=column_name, table=table_name ))) - best_generic_generator = { "name": info.generator } + best_generic_generator = { "name": info.generator, "kwargs": {} } for k, v in results.mappings().first().items(): - best_generic_generator[k] = float(v) + conv_fn = info.arg_types.get(k, float) + best_generic_generator["kwargs"][k] = conv_fn(v) if best_generic_generator is not None: if table_name not in generic: generic[str(table_name)] = {} diff --git a/sqlsynthgen/remove.py b/sqlsynthgen/remove.py index cdab8c70..ba62b6cd 100644 --- a/sqlsynthgen/remove.py +++ b/sqlsynthgen/remove.py @@ -1,5 +1,4 @@ """Functions and classes to undo the operations in create.py.""" -from types import ModuleType from typing import Any, Mapping from sqlalchemy import delete, MetaData @@ -10,6 +9,8 @@ get_sync_engine, get_vocabulary_table_names, logger, + remove_vocab_foreign_key_constraints, + reinstate_vocab_foreign_key_constraints, sorted_non_vocabulary_tables, ) @@ -31,7 +32,7 @@ def remove_db_data( dst_conn.commit() -def remove_db_vocab(metadata: MetaData, config: Mapping[str, Any]) -> None: +def remove_db_vocab(metadata: MetaData, meta_dict: Mapping[str, Any], config: Mapping[str, Any]) -> None: """Truncate the vocabulary tables.""" settings = get_settings() assert settings.dst_dsn, "Missing destination database settings" @@ -40,10 +41,12 @@ def remove_db_vocab(metadata: MetaData, config: Mapping[str, Any]) -> None: ) with dst_engine.connect() as dst_conn: + remove_vocab_foreign_key_constraints(metadata, config, dst_conn) for table in get_vocabulary_table_names(config): logger.debug('Truncating 
vocabulary table "%s".', table) dst_conn.execute(delete(metadata.tables[table])) dst_conn.commit() + reinstate_vocab_foreign_key_constraints(metadata, meta_dict, config, dst_conn) def remove_db_tables(metadata: MetaData) -> None: diff --git a/sqlsynthgen/templates/ssg.py.j2 b/sqlsynthgen/templates/ssg.py.j2 index 7b0e9781..1114d184 100644 --- a/sqlsynthgen/templates/ssg.py.j2 +++ b/sqlsynthgen/templates/ssg.py.j2 @@ -1,12 +1,14 @@ """This file was auto-generated by sqlsynthgen but can be edited manually.""" -from mimesis import Generic, Numeric +from mimesis import Generic, Numeric, Person from mimesis.locales import Locale import sqlalchemy -from sqlsynthgen.base import FileUploader, TableGenerator +from sqlsynthgen.base import FileUploader, TableGenerator, DistributionGenerator from sqlsynthgen.main import load_metadata generic = Generic(locale=Locale.EN_GB) numeric = Numeric() +person = Person() +dist_gen = DistributionGenerator() {% for provider_import in provider_imports %} from sqlsynthgen.providers import {{ provider_import }} From 128befcfaaa2475834e58e9109afae472ed5b198 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 15 Jan 2025 14:14:33 +0000 Subject: [PATCH 21/85] Incomplete summary data fails all distrubtions that depend on it. --- sqlsynthgen/make.py | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 5e1985fd..f87d6fee 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -253,6 +253,21 @@ class GeneratorInfo: # True if we should see if we can treat this column as an amount with a distribution choice: bool = False + +def get_result_mappings(info: GeneratorInfo, results) -> dict[str, Any]: + """ + Gets a mapping from the results of a database query as a Python + dictionary converted according to the GeneratorInfo provided. + """ + kw = {} + for k, v in results.mappings().first().items(): + if v is None: + return None + conv_fn = info.arg_types.get(k, float) + kw[k] = conv_fn(v) + return kw + + _COLUMN_TYPE_TO_GENERATOR_INFO = { sqltypes.Boolean: GeneratorInfo( generator="generic.development.boolean", @@ -804,7 +819,7 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: "fit": best_fit, "kwargs": best_fit_info["kwarg_fn"](float(result.mean), float(result.sd)), } - if info.choice and is_vocab: # For now let's not try to generate choices of unknowable stuff, just generate unknowable stuff. 
+ if info.choice and is_vocab: # If it's not a vocabulary column then it's less useful to work out the choice distribution # Find information on how many of each example there is results = await execute_raw_query(text( "SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( @@ -851,10 +866,9 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: results = await execute_raw_query(text(info.summary_query.format( column=column_name, table=table_name ))) - best_generic_generator = { "name": info.generator, "kwargs": {} } - for k, v in results.mappings().first().items(): - conv_fn = info.arg_types.get(k, float) - best_generic_generator["kwargs"][k] = conv_fn(v) + kw = get_result_mappings(info, results) + if kw is not None: + best_generic_generator = { "name": info.generator, "kwargs": kw } if best_generic_generator is not None: if table_name not in generic: generic[str(table_name)] = {} From 5f173715d9553eb8fb2566933f0e126f7040ddbe Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 15 Jan 2025 15:52:07 +0000 Subject: [PATCH 22/85] distrubution buckets should not include NULL --- sqlsynthgen/make.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index f87d6fee..d1b7e7e9 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -797,14 +797,15 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: count = result.count if result.sd is not None and 0 < result.sd: raw_buckets = await execute_raw_query(text( - "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) as b from {table} group by b".format( + "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( column=column_name, table=table_name, x=result.mean - 2 * result.sd, w = result.sd / 2 ) )) buckets = [0] * 10 for rb in raw_buckets: - bucket = min(9, max(0, int(rb.b) + 1)) - buckets[bucket] += rb.f / count + if rb.b is not None: + bucket = min(9, max(0, int(rb.b) + 1)) + buckets[bucket] += rb.f / count best_fit = None best_fit_distribution = None best_fit_info = None From 08ef715fec741d6b15f35856431e2d3d380e361d Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 15 Jan 2025 16:27:08 +0000 Subject: [PATCH 23/85] protect against standard deviation being NaN --- sqlsynthgen/make.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index d1b7e7e9..747890aa 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -2,7 +2,6 @@ import asyncio import decimal import inspect -import sys from dataclasses import dataclass, field import math from pathlib import Path @@ -795,7 +794,7 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: )) result = results.first() count = result.count - if result.sd is not None and 0 < result.sd: + if result.sd is not None and not math.isnan(result.sd) and 0 < result.sd: raw_buckets = await execute_raw_query(text( "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( column=column_name, table=table_name, x=result.mean - 2 * result.sd, w = result.sd / 2 From cdf0480e935e6579fb6cdca8e929e4144b0094c8 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 15 Jan 2025 19:13:04 +0000 Subject: [PATCH 24/85] Fixed dynamic module loading --- sqlsynthgen/utils.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index c3275d07..44070280 100644 --- a/sqlsynthgen/utils.py +++ 
b/sqlsynthgen/utils.py @@ -4,6 +4,7 @@ import os import sys from importlib import import_module +import importlib.util from pathlib import Path from types import ModuleType from typing import Any, Final, Mapping, Optional, Union @@ -79,15 +80,9 @@ def import_file(file_path: str) -> ModuleType: Returns: ModuleType """ - module_name = os.path.splitext(os.path.basename(file_path))[0] - - sys.path.append(os.path.dirname(os.path.abspath(file_path))) - - try: - module = import_module(module_name) - finally: - sys.path.pop() - + spec = importlib.util.spec_from_file_location("ssg", file_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) return module From ab571bf9c50033507b4c6464394a2fd7ff2e0476 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 15 Jan 2025 19:14:23 +0000 Subject: [PATCH 25/85] Fixed make-generators without src-stats.yaml --- sqlsynthgen/make.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 747890aa..87b7dbe3 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -538,8 +538,9 @@ def make_table_generators( # pylint: disable=too-many-locals engine = get_sync_engine(create_db_engine(src_dsn, schema_name=settings.src_schema)) src_stats = {} - with open(src_stats_filename, "r", encoding="utf-8") as f: - src_stats = yaml.unsafe_load(f) + if src_stats_filename: + with open(src_stats_filename, "r", encoding="utf-8") as f: + src_stats = yaml.unsafe_load(f) tables: list[TableGeneratorInfo] = [] vocabulary_tables: list[VocabularyTableGeneratorInfo] = [] From 4e038b46d27c26fc6e9fce68a4013146aa487747 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 13 Feb 2025 15:18:24 +0000 Subject: [PATCH 26/85] Initial missingness implementation including union column sets --- sqlsynthgen/create.py | 5 +- sqlsynthgen/make.py | 403 +++++++++++++++++++++++++++++++----------- sqlsynthgen/utils.py | 1 - 3 files changed, 298 insertions(+), 111 deletions(-) diff --git a/sqlsynthgen/create.py b/sqlsynthgen/create.py index faf4a6fc..cfd55dbe 100644 --- a/sqlsynthgen/create.py +++ b/sqlsynthgen/create.py @@ -1,5 +1,6 @@ """Functions and classes to create and populate the target database.""" from collections import Counter +import random from typing import Any, Generator, Mapping, Sequence, Tuple from sqlalchemy import Connection, insert @@ -133,7 +134,7 @@ def _populate_story( table = table_dict[table_name] if table.name in table_generator_dict: table_generator = table_generator_dict[table.name] - default_values = table_generator(dst_conn) + default_values = table_generator(dst_conn, random.random) else: default_values = {} insert_values = {**default_values, **provided_values} @@ -201,7 +202,7 @@ def populate( # Run all the inserts for one table in a transaction with dst_conn.begin(): for _ in range(table_generator.num_rows_per_pass): - stmt = insert(table).values(table_generator(dst_conn)) + stmt = insert(table).values(table_generator(dst_conn, random.random)) dst_conn.execute(stmt) row_counts[table.name] = row_counts.get(table.name, 0) + 1 return row_counts diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 87b7dbe3..eb2c94af 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -5,7 +5,9 @@ from dataclasses import dataclass, field import math from pathlib import Path -from typing import Any, Callable, Final, Mapping, Optional, Sequence, Tuple +from typing import ( + Any, Callable, Iterable, Final, Mapping, Optional, Self, Sequence, Tuple +) import yaml import pandas as pd @@ 
-69,13 +71,137 @@ class RowGeneratorInfo: primary_key: bool = False +@dataclass +class ColumnChoice: + """ Chooses columns based on a random number in [0,1) """ + options: list[tuple[float, set[str]]] + + def all_columns(self) -> set[str]: + """ Returns the set of all columns known """ + cols: set[str] = set() + for (_, cs) in self.options: + cols.update(cs) + return cols + + def choose(self, p: float) -> set[str]: + """ + Returns a set of columns that should have non-null values set. + + p is a random number 0 <= p < 1 upon which this choice is made. + """ + for (cumulative_probability, values) in self.options: + if p < cumulative_probability: + return values + return [] + + @classmethod + def make( + _cls, + cols: Iterable[str], + dependent_columns: dict[str, Iterable[str]], + row_count: int | None, + value_count: dict[str, int], + ) -> Self: + """ + Makes a ColumnChoice out of a union + + cols: the columns in the union + dependent_columns: a dict whose keys are a subset of cols, and + whose values are the names of the columns (including the key!) + that share row generators with this column (if any). + row_count: The total number of rows in the table, if known. + value_count: A dict whose keys are a subset of cols, and whose + values are the number of nonnull values in this column. + Columns for which this number is not known are not in the + keys of this dict. + """ + total_value_count = 0 + counted_column_count = 0 + for col in cols: + vc = value_count[col] + if vc is not None: + counted_column_count += 1 + total_value_count += vc + # work out what proportion to assign to uncounted columns + if row_count is None: + if counted_column_count == 0: + default_count = 1 + row_count = len(cols) + else: + default_count = total_value_count / counted_column_count + row_count = default_count * len(cols) + elif counted_column_count == len(cols): + default_count = 0 + else: + default_count = row_count / (len(cols) - counted_column_count) + cumulative_count = 0 + choice = ColumnChoice(options=[]) + for col in cols: + cumulative_count += value_count.get(col, default_count) + proportion = cumulative_count / row_count + if col in dependent_columns: + choice.options.append((proportion, dependent_columns[col])) + else: + choice.options.append((proportion, {col})) + return choice + + +def make_column_choices( + table_name: str, + table_config: Mapping[str, Any], + src_stats: Mapping[str, Any], +) -> list[ColumnChoice]: + # each union is a dict of union names to a list of its columns + unions: dict[str, list[str]] = get_property(table_config, "unions", {}) + generic = src_stats.get("_sqlsynthgen_generic", {}) + table_stats = generic.get(table_name, {}) + column_generators = table_stats.get("column_generators", {}) + # Set of all columns that are part of a union + columns_in_union: set[str] = set() + for (union_name, cols) in unions.items(): + for col in cols: + if col in columns_in_union: + logger.warning("union %s overlaps with another union in table %s", union_name, table_name) + columns_in_union.add(col) + # Now we find row_generators that overlap (by one only!) with unions: + # the columns in these generators must be null (or not null) together. 
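# A minimal sketch of how the resulting ColumnChoice behaves (the column names
# and proportions are invented for illustration): options hold cumulative
# proportions, and a single uniform draw in [0, 1) decides which member of the
# union gets non-null values for the row.
#   cc = ColumnChoice(options=[(0.6, {"weight_kg"}), (0.9, {"weight_lb"})])
#   cc.choose(0.25)  # -> {"weight_kg"}
#   cc.choose(0.75)  # -> {"weight_lb"}
#   cc.choose(0.95)  # -> [] (no member chosen; the whole union stays NULL)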
+ dependent_columns: dict[str, set[str]] = {} + for row_gen in get_property(table_config, "row_generators", []): + assigned = row_gen["columns_assigned"] + if type(assigned) is list: + assigned_set = set(assigned) + intersection = assigned_set.intersection(columns_in_union) + n = len(intersection) + if 1 < n: + logger.warning( + "row generator %s in table %s supplies columns for multiple unions", + row_gen["name"], + table_name, + ) + elif 1 == n: + u = intersection.pop() + dependent_columns[u] = assigned_set + # table row count: + row_count: int | None = table_stats.get("row_count", None) + # some columns have counts of nonnull values: + value_count = { col: vs.get("count", None) for (col, vs) in column_generators.items() } + # Now we can convert unions to ColumnChoices + choices: list[ColumnChoice] = [] + for cols in unions.values(): + choices.append( + ColumnChoice.make(cols, dependent_columns, row_count, value_count) + ) + return choices + + @dataclass class TableGeneratorInfo: """Contains the ssg.py content related to regular tables.""" class_name: str table_name: str - columns: list[str] + nonnull_columns: set[str] + column_choices: list[ColumnChoice] rows_per_pass: int row_gens: list[RowGeneratorInfo] = field(default_factory=list) unique_constraints: list[UniqueConstraint] = field(default_factory=list) @@ -112,7 +238,7 @@ def _get_row_generator( ) -> tuple[list[RowGeneratorInfo], list[str]]: """Get the row generators information, for the given table.""" row_gen_info: list[RowGeneratorInfo] = [] - config: list[dict[str, Any]] = get_property(table_config, "row_generators", {}) + config: list[dict[str, Any]] = get_property(table_config, "row_generators", []) columns_covered = [] for gen_conf in config: name: str = gen_conf["name"] @@ -430,10 +556,15 @@ def _get_generator_for_table( *primary_keys, name=f"{table.name}_primary_key" )) + column_choices = make_column_choices(table.name, table_config, src_stats) + nonnull_columns={str(col.name) for col in table.columns} + for cc in column_choices: + nonnull_columns.difference_update(cc.all_columns()) table_data: TableGeneratorInfo = TableGeneratorInfo( table_name=table.name, class_name=table.name.title() + "Generator", - columns=[str(col.name) for col in table.columns], + nonnull_columns=nonnull_columns, + column_choices=column_choices, rows_per_pass=get_property(table_config, "num_rows_per_pass", 1), unique_constraints=unique_constraints, ) @@ -441,7 +572,8 @@ def _get_generator_for_table( row_gen_info_data, columns_covered = _get_row_generator(table_config) table_data.row_gens.extend(row_gen_info_data) - generic_generators = get_property(src_stats, "_sqlsynthgen_generic", {}).get(table.name, {}) + generic_generators = get_property(src_stats, "_sqlsynthgen_generic", {} + ).get(table.name, {}).get("column_generators", {}) for column in table.columns: if column.name not in columns_covered: # No generator for this column in the user config. @@ -711,38 +843,40 @@ def fit_error(test, actual): } -async def make_src_stats( - dsn: str, config: Mapping, metadata: MetaData, schema_name: Optional[str] = None -) -> dict[str, list[dict]]: - """Run the src-stats queries specified by the configuration. +class DbConnection: + def __init__(self, engine): + self._engine = engine - Query the src database with the queries in the src-stats block of the `config` - dictionary, using the differential privacy parameters set in the `smartnoise-sql` - block of `config`. Record the results in a dictionary and returns it. 
- Args: - dsn: database connection string - config: a dictionary with the necessary configuration - schema_name: name of the database schema + async def __aenter__(self): + if isinstance(self._engine, AsyncEngine): + self._connection = await self._engine.connect() + else: + self._connection = self._engine.connect() + return self - Returns: - The dictionary of src-stats. - """ - use_asyncio = config.get("use-asyncio", False) - engine = create_db_engine(dsn, schema_name=schema_name, use_asyncio=use_asyncio) + async def __aexit__(self, _type, _value, _tb): + if isinstance(self._engine, AsyncEngine): + await self._connection.close() + else: + self._connection.close() - async def execute_raw_query(query: str): - if isinstance(engine, AsyncEngine): - async with engine.connect() as conn: - return await conn.execute(query) + async def execute_raw_query(self, query): + if isinstance(self._engine, AsyncEngine): + return await self._connection.execute(query) else: - with engine.connect() as conn: - return conn.execute(query) + return self._connection.execute(query) + + async def table_row_count(self, table_name: str): + with await self.execute_raw_query( + text(f"SELECT COUNT(*) FROM {table_name}") + ) as result: + return result.scalar_one() - async def execute_query(query_block: Mapping[str, Any]) -> Any: + async def execute_query(self, query_block: Mapping[str, Any]) -> Any: """Execute query in query_block.""" logger.debug("Executing query %s", query_block["name"]) query = text(query_block["query"]) - raw_result = execute_raw_query(query) + raw_result = self.execute_raw_query(query) if "dp-query" in query_block: result_df = pd.DataFrame(raw_result.mappings()) @@ -763,9 +897,32 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: ] return final_result + +async def make_src_stats( + dsn: str, config: Mapping, metadata: MetaData, schema_name: Optional[str] = None +) -> dict[str, list[dict]]: + """Run the src-stats queries specified by the configuration. + + Query the src database with the queries in the src-stats block of the `config` + dictionary, using the differential privacy parameters set in the `smartnoise-sql` + block of `config`. Record the results in a dictionary and return it. + Args: + dsn: database connection string + config: a dictionary with the necessary configuration + schema_name: name of the database schema + + Returns: + The dictionary of src-stats. 
+ """ + use_asyncio = config.get("use-asyncio", False) + engine = create_db_engine(dsn, schema_name=schema_name, use_asyncio=use_asyncio) + async with DbConnection(engine) as db_conn: + return await make_src_stats_connection(config, db_conn, metadata) + +async def make_src_stats_connection(config: Mapping, db_conn: DbConnection, metadata: MetaData): query_blocks = config.get("src-stats", []) results = await asyncio.gather( - *[execute_query(query_block) for query_block in query_blocks] + *[db_conn.execute_query(query_block) for query_block in query_blocks] ) src_stats = { query_block["name"]: result @@ -778,7 +935,13 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: generic = {} tables_config = config.get("tables", {}) - for table_name, table in metadata.tables.items(): + for table_name0, table in metadata.tables.items(): + table_name = str(table_name0) + row_count = await db_conn.table_row_count(table_name) + generic[table_name] = { + "row_count": row_count, + "column_generators": {}, + } table_config = tables_config.get(table_name, None) vocab_columns = set() if table_config is None else set(table_config.get("vocabulary_columns", [])) for column_name, column in table.columns.items(): @@ -788,92 +951,116 @@ async def execute_query(query_block: Mapping[str, Any]) -> Any: if not column.foreign_keys and not column.primary_key and info is not None: if info.numeric: # Find summary information; mean, standard deviation and buckets 1/2 standard deviation width around mean. - results = await execute_raw_query(text( - "SELECT AVG({column}) AS mean, STDDEV({column}) AS sd, COUNT({column}) AS count FROM {table}".format( - column=column_name, table=table_name - ) - )) - result = results.first() - count = result.count - if result.sd is not None and not math.isnan(result.sd) and 0 < result.sd: - raw_buckets = await execute_raw_query(text( - "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( - column=column_name, table=table_name, x=result.mean - 2 * result.sd, w = result.sd / 2 - ) - )) - buckets = [0] * 10 - for rb in raw_buckets: - if rb.b is not None: - bucket = min(9, max(0, int(rb.b) + 1)) - buckets[bucket] += rb.f / count - best_fit = None - best_fit_distribution = None - best_fit_info = None - for dist_name, dist_info in _CDF_BUCKETS.items(): - fit = fit_error(dist_info["buckets"], buckets) - if best_fit is None or fit < best_fit: - best_fit = fit - best_fit_distribution = dist_name - best_fit_info = dist_info - best_generic_generator = { - "name": best_fit_distribution, - "fit": best_fit, - "kwargs": best_fit_info["kwarg_fn"](float(result.mean), float(result.sd)), - } + best_generic_generator = await _get_generic_numeric_generator( + db_conn, + column_name, + table_name, + ) if info.choice and is_vocab: # If it's not a vocabulary column then it's less useful to work out the choice distribution # Find information on how many of each example there is - results = await execute_raw_query(text( - "SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( - column=column_name, table=table_name - ) - )) - values = [] - counts = [] - total = 0 - for result in results: - c = result.f - if c != 0: - total += c - counts.append(c) - v = result.v - if type(v) is decimal.Decimal: - v = float(v) - values.append(v) - if counts: - total2 = total * total - # Which distribution fits best? 
- zipf = zipf_distribution(total, len(counts)) - zipf_fit = fit_error(zipf, counts) / total2 - unif = uniform_distribution(total, len(counts)) - unif_fit = fit_error(unif, counts) / total2 - if best_generic_generator is None or zipf_fit < best_generic_generator["fit"]: - best_generic_generator = { - "name": "dist_gen.zipf_choice", - "fit": zipf_fit, - "kwargs": { - "a": values, - "n": f"{len(counts)}", - } - } - if best_generic_generator is None or unif_fit < best_generic_generator["fit"]: - best_generic_generator = { - "name": "dist_gen.choice", - "fit": unif_fit, - "kwargs": { - "a": values, - } - } + gg = await _get_generic_choice_generator( + db_conn, + column_name, + table_name, + ) + if best_generic_generator is None or ( + gg is not None and gg["fit"] < best_generic_generator["fit"] + ): + best_generic_generator = gg if info.summary_query is not None: - results = await execute_raw_query(text(info.summary_query.format( + # Run specified query + results = await db_conn.execute_raw_query(text(info.summary_query.format( column=column_name, table=table_name ))) kw = get_result_mappings(info, results) if kw is not None: best_generic_generator = { "name": info.generator, "kwargs": kw } if best_generic_generator is not None: - if table_name not in generic: - generic[str(table_name)] = {} - generic[str(table_name)][str(column_name)] = best_generic_generator + generic[table_name]["column_generators"][str(column_name)] = best_generic_generator if generic: src_stats["_sqlsynthgen_generic"] = generic return src_stats + + +async def _get_generic_choice_generator(db_conn, column_name, table_name): + results = await db_conn.execute_raw_query(text( + "SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( + column=column_name, table=table_name + ) + )) + values = [] # The values found + counts = [] # The number or each value + total = 0 # total number of non-NULL results + for result in results: + c = result.f + if c != 0: + total += c + counts.append(c) + v = result.v + if type(v) is decimal.Decimal: + v = float(v) + values.append(v) + if not counts: + return None + total2 = total * total + # Which distribution fits best? + zipf = zipf_distribution(total, len(counts)) + zipf_fit = fit_error(zipf, counts) / total2 + unif = uniform_distribution(total, len(counts)) + unif_fit = fit_error(unif, counts) / total2 + if zipf_fit < unif_fit: + return { + "name": "dist_gen.zipf_choice", + "fit": zipf_fit, + "count": total, + "kwargs": { + "a": values, + "n": f"{len(counts)}", + } + } + return { + "name": "dist_gen.choice", + "fit": unif_fit, + "count": total, + "kwargs": { + "a": values, + } + } + + +async def _get_generic_numeric_generator(db_conn, column_name, table_name): + # Find summary information; mean, standard deviation and buckets 1/2 standard deviation width around mean. 
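# Sketch of the bucketing performed below (mean and sd values assumed for
# illustration): with mean=50 and sd=10 the second query groups rows by
# FLOOR((value - 30) / 5), and the clamp min(9, max(0, b + 1)) turns that into
# ten buckets: eight half-sigma-wide buckets covering 30..70 plus two tail
# buckets for anything outside that range. A value of 43 gives
# FLOOR(13 / 5) = 2 and so lands in buckets[3].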
+ results = await db_conn.execute_raw_query(text( + "SELECT AVG({column}) AS mean, STDDEV({column}) AS sd, COUNT({column}) AS count FROM {table}".format( + column=column_name, table=table_name + ) + )) + result = results.first() + count = result.count + if result.sd is not None and not math.isnan(result.sd) and 0 < result.sd: + raw_buckets = await db_conn.execute_raw_query(text( + "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( + column=column_name, table=table_name, x=result.mean - 2 * result.sd, w = result.sd / 2 + ) + )) + buckets = [0] * 10 + for rb in raw_buckets: + if rb.b is not None: + bucket = min(9, max(0, int(rb.b) + 1)) + buckets[bucket] += rb.f / count + best_fit = None + best_fit_distribution = None + best_fit_info = None + for dist_name, dist_info in _CDF_BUCKETS.items(): + fit = fit_error(dist_info["buckets"], buckets) + if best_fit is None or fit < best_fit: + best_fit = fit + best_fit_distribution = dist_name + best_fit_info = dist_info + best_generic_generator = { + "name": best_fit_distribution, + "fit": best_fit, + "count": count, + "kwargs": best_fit_info["kwarg_fn"](float(result.mean), float(result.sd)), + } + return best_generic_generator diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index 44070280..2ee16fd7 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -1,7 +1,6 @@ """Utility functions.""" import json import logging -import os import sys from importlib import import_module import importlib.util From cbd6e050d4d45f032147ac45815ced5dbf468f31 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 13 Feb 2025 18:19:27 +0000 Subject: [PATCH 27/85] and template changes --- sqlsynthgen/templates/ssg.py.j2 | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/sqlsynthgen/templates/ssg.py.j2 b/sqlsynthgen/templates/ssg.py.j2 index 1114d184..e84c63df 100644 --- a/sqlsynthgen/templates/ssg.py.j2 +++ b/sqlsynthgen/templates/ssg.py.j2 @@ -10,8 +10,12 @@ numeric = Numeric() person = Person() dist_gen = DistributionGenerator() +from sqlsynthgen.providers import ( +{% for provider_import in provider_imports %} + {{ provider_import }}{%- if not loop.last %},{%- endif %} +{% endfor %} +) {% for provider_import in provider_imports %} -from sqlsynthgen.providers import {{ provider_import }} generic.add_provider({{ provider_import }}) {% endfor %} @@ -41,7 +45,7 @@ class {{ table_data.class_name }}(TableGenerator): def __init__(self): self.initialized = False - def __call__(self, dst_db_conn): + def __call__(self, dst_db_conn, get_random): if not self.initialized: {% for constraint in table_data.unique_constraints %} query_text = f"SELECT {% @@ -57,7 +61,14 @@ class {{ table_data.class_name }}(TableGenerator): {% endfor %} self.initialized = True result = {} - columns_to_generate = set({{ table_data.columns }}) + columns_to_generate = set({{ table_data.nonnull_columns }}) + {% for choice in table_data.column_choices %} + p = get_random() + {% for probability, columns in choice.options %} + {%- if not loop.first %}el{%- endif %}if p < {{ probability }}: + columns_to_generate.update(set({{ columns }})) + {% endfor %} + {% endfor %} {% if max_unique_constraint_tries is not none %} max_tries={{max_unique_constraint_tries}}, {% endif %} From 20b59fbd0e1b67e4ff6d12f1b29da515c8b2f2a1 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 14 Feb 2025 15:15:50 +0000 Subject: [PATCH 28/85] primary keys now start after all existing keys --- sqlsynthgen/make.py | 25 ++++++++----------------- 
sqlsynthgen/providers.py | 19 +++++++++++++++++-- sqlsynthgen/utils.py | 2 +- 3 files changed, 26 insertions(+), 20 deletions(-) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index eb2c94af..4b1fb5f8 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -118,7 +118,7 @@ def make( total_value_count = 0 counted_column_count = 0 for col in cols: - vc = value_count[col] + vc = value_count.get(col, None) if vc is not None: counted_column_count += 1 total_value_count += vc @@ -351,8 +351,9 @@ def _integer_generator(column: Column) -> tuple[str, dict[str, str]]: """ if not column.primary_key: return ("generic.numeric.integer_number", {}) - return ("numeric.increment", { - "accumulator": f'"{column.table.fullname}.{column.name}"' + return ("generic.column_value_provider.increment", { + "db_connection": "dst_db_conn", + "column": f'metadata.tables["{column.table.name}"].columns["{column.name}"]', }) @@ -661,13 +662,7 @@ def make_table_generators( # pylint: disable=too-many-locals """ row_generator_module_name: str = config.get("row_generators_module", None) story_generator_module_name = config.get("story_generators_module", None) - - settings = get_settings() - src_dsn: str = settings.src_dsn or "" - assert src_dsn != "", "Missing SRC_DSN setting." - tables_config = config.get("tables", {}) - engine = get_sync_engine(create_db_engine(src_dsn, schema_name=settings.src_schema)) src_stats = {} if src_stats_filename: @@ -689,9 +684,7 @@ def make_table_generators( # pylint: disable=too-many-locals related_non_vocab ) vocabulary_tables.append( - _get_generator_for_existing_vocabulary_table( - table, engine - ) + _get_generator_for_existing_vocabulary_table(table) ) else: tables.append(_get_generator_for_table( @@ -728,14 +721,11 @@ def generate_ssg_content(template_context: Mapping[str, Any]) -> str: ) ssg_template: Template = environment.get_template(SSG_TEMPLATE_FILENAME) template_output: str = ssg_template.render(template_context) - return format_str(template_output, mode=FileMode()) def _get_generator_for_existing_vocabulary_table( table: Table, - engine: Engine, - table_file_name: Optional[str] = None, ) -> VocabularyTableGeneratorInfo: """ Turns an existing vocabulary YAML file into a VocabularyTableGeneratorInfo. 
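
The idea behind this patch's subject ("primary keys now start after all existing keys") can be summarised in a short sketch, assuming a SQLAlchemy ``Connection`` and a single integer key column; the provider change in ``sqlsynthgen/providers.py`` below keeps one such accumulator per fully-qualified column name:

.. code-block:: python

    # Minimal sketch of "start after the existing maximum key": query the
    # current maximum once, then hand out max+1, max+2, ... from memory.
    from sqlalchemy import Column, Connection, func, select

    _accumulators: dict[str, int] = {}

    def next_key(connection: Connection, column: Column) -> int:
        name = f"{column.table.name}.{column.name}"
        if name not in _accumulators:
            # Seed the counter from the largest key already present (0 if the
            # table is empty), so generated keys never collide with real ones.
            current_max = connection.execute(select(func.max(column))).scalar()
            _accumulators[name] = current_max if current_max is not None else 0
        _accumulators[name] += 1
        return _accumulators[name]
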
@@ -1037,6 +1027,7 @@ async def _get_generic_numeric_generator(db_conn, column_name, table_name): )) result = results.first() count = result.count + generator = None if result.sd is not None and not math.isnan(result.sd) and 0 < result.sd: raw_buckets = await db_conn.execute_raw_query(text( "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( @@ -1057,10 +1048,10 @@ async def _get_generic_numeric_generator(db_conn, column_name, table_name): best_fit = fit best_fit_distribution = dist_name best_fit_info = dist_info - best_generic_generator = { + generator = { "name": best_fit_distribution, "fit": best_fit, "count": count, "kwargs": best_fit_info["kwarg_fn"](float(result.mean), float(result.sd)), } - return best_generic_generator + return generator diff --git a/sqlsynthgen/providers.py b/sqlsynthgen/providers.py index 725cbb8b..6b26ab5d 100644 --- a/sqlsynthgen/providers.py +++ b/sqlsynthgen/providers.py @@ -5,8 +5,8 @@ from mimesis import Datetime, Text from mimesis.providers.base import BaseDataProvider, BaseProvider -from sqlalchemy import Connection -from sqlalchemy.sql import functions, select +from sqlalchemy import Connection, Column +from sqlalchemy.sql import functions, select, func class ColumnValueProvider(BaseProvider): @@ -29,6 +29,21 @@ def column_value( return getattr(random_row, column_name) return None + def __init__(self, *, seed = None, **kwargs): + super().__init__(seed=seed, **kwargs) + self.accumulators: dict[str, int] = {} + + def increment(self, db_connection: Connection, column: Column) -> int: + """ Return incrementing value for the column specified. """ + name = f"{column.table.name}.{column.name}" + result = self.accumulators.get(name, None) + if result == None: + row = db_connection.execute(select(func.max(column))).first() + result = 1 if row is None else row[0] + value = result + 1 + self.accumulators[name] = value + return value + class BytesProvider(BaseDataProvider): """A Mimesis provider of binary data.""" diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index 2ee16fd7..fe90e529 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -306,7 +306,7 @@ def reinstate_vocab_foreign_key_constraints(metadata, meta_dict, config, dst_eng session.execute(AddConstraint(fk)) session.commit() except IntegrityError: - logger.exception("Restoring table %s foreign keys failed:", vocab_table) + logger.exception("Restoring table %s foreign keys failed:", vocab_table_name) def stream_yaml(yaml_file_handle): From 0883fa8de8f8e5eaa2b39707e34dd06d31b8f127 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 14 Feb 2025 18:46:04 +0000 Subject: [PATCH 29/85] Fixed ssg template choice stuff --- sqlsynthgen/templates/ssg.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sqlsynthgen/templates/ssg.py.j2 b/sqlsynthgen/templates/ssg.py.j2 index e84c63df..0b0382a3 100644 --- a/sqlsynthgen/templates/ssg.py.j2 +++ b/sqlsynthgen/templates/ssg.py.j2 @@ -65,7 +65,7 @@ class {{ table_data.class_name }}(TableGenerator): {% for choice in table_data.column_choices %} p = get_random() {% for probability, columns in choice.options %} - {%- if not loop.first %}el{%- endif %}if p < {{ probability }}: + {%+ if not loop.first %}el{%+ endif %}if p < {{ probability }}: columns_to_generate.update(set({{ columns }})) {% endfor %} {% endfor %} From 1e3ca3259d6947f1de7599ce6d76d3d65219dae7 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Mon, 24 Feb 2025 18:44:33 +0000 Subject: [PATCH 30/85] Generating config.yaml and making it 
mandatory as input --- sqlsynthgen/json_schemas/config_schema.json | 21 +++++++++ sqlsynthgen/main.py | 47 ++++++++++++++++----- sqlsynthgen/make.py | 20 +++++++++ 3 files changed, 78 insertions(+), 10 deletions(-) diff --git a/sqlsynthgen/json_schemas/config_schema.json b/sqlsynthgen/json_schemas/config_schema.json index 3bf74f70..40070061 100644 --- a/sqlsynthgen/json_schemas/config_schema.json +++ b/sqlsynthgen/json_schemas/config_schema.json @@ -185,6 +185,27 @@ } } } + }, + "unions": { + "description": "Groups of columns that represent different representations of the same value.", + "type": "object", + "patternProperties": { + ".*": { + "type": "array", + "items": { + "description": "Column name.", + "type": "string" + } + } + } + }, + "vocabulary_columns": { + "description": "Columns whose set of possible values is not considered private and so can be reproduced in the output database.", + "type": "array", + "items": { + "description": "Column name.", + "type": "string" + } } } } diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 4c0e43d4..102fc6e6 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -17,6 +17,7 @@ make_table_generators, make_tables_file, make_vocabulary_tables, + generate_config_file, ) from sqlsynthgen.remove import remove_db_data, remove_db_tables, remove_db_vocab from sqlsynthgen.settings import Settings, get_settings @@ -35,6 +36,7 @@ # pylint: disable=too-many-arguments ORM_FILENAME: Final[str] = "orm.yaml" +CONFIG_FILENAME: Final[str] = "config.yaml" SSG_FILENAME: Final[str] = "ssg.py" STATS_FILENAME: Final[str] = "src-stats.yaml" @@ -93,7 +95,7 @@ def create_data( SSG_FILENAME, help="The name of the generators file. Must be in the current working directory." ), - config_file: Optional[str] = Option(None, help="The configuration file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), num_passes: int = Option(1, help="Number of passes (rows or stories) to make"), ) -> None: """Populate the schema in the target directory with synthetic data. @@ -141,7 +143,7 @@ def create_data( @app.command() def create_vocab( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), - config_file: str = Option(None, help="The configuration file"), + config_file: str = Option(CONFIG_FILENAME, help="The configuration file"), ) -> None: """Import vocabulary data into the target database. @@ -160,7 +162,7 @@ def create_vocab( @app.command() def create_tables( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), - config_file: Optional[str] = Option(None, help="The configuration file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), ) -> None: """Create schema from the ORM YAML file. 
@@ -180,7 +182,7 @@ def create_tables( @app.command() def make_vocab( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), - config_file: Optional[str] = Option(None, help="The configuration file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), force: bool = Option(True, help="Overwrite any existing vocabulary file."), compress: bool = Option(False, help="Compress file to .gz"), ) -> None: @@ -208,7 +210,7 @@ def make_vocab( def make_generators( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), ssg_file: str = Option(SSG_FILENAME, help="Path to write Python generators to."), - config_file: Optional[str] = Option(None, help="The configuration file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), stats_file: Optional[str] = Option(None, help="Statistics file (output of make-stats)"), force: bool = Option(False, help="Overwrite any existing Python generators file."), ) -> None: @@ -244,7 +246,7 @@ def make_generators( @app.command() def make_stats( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), - config_file: Optional[str] = Option(None, help="The configuration file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), stats_file: str = Option(STATS_FILENAME), force: bool = Option(False, help="Overwrite any existing vocabulary file."), ) -> None: @@ -276,7 +278,7 @@ def make_stats( @app.command() def make_tables( - config_file: Optional[str] = Option(None, help="The configuration file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), orm_file: str = Option(ORM_FILENAME, help="Path to write the ORM yaml file to"), force: bool = Option(False, help="Overwrite any existing orm yaml file."), ) -> None: @@ -300,6 +302,31 @@ def make_tables( logger.debug("%s created.", orm_file) +@app.command() +def generate_config( + config_file: Optional[str] = Option(CONFIG_FILENAME, help="Path to write the configuration file to"), + force: bool = Option(False, help="Overwrite any existing configuration yaml file"), +) -> None: + """ + Generate a basic configuration file. + + The configuration produced just includes default configuration for the + existing source database tables. 
+ """ + logger.debug("Creating %s.", config_file) + + config_file_path = Path(config_file) + if not force: + _check_file_non_existence(config_file_path) + + settings = get_settings() + src_dsn: str = _require_src_db_dsn(settings) + + content = generate_config_file(src_dsn, settings.src_schema) + config_file_path.write_text(content, encoding="utf-8") + logger.debug("%s created.", config_file) + + @app.command() def validate_config( config_file: Path = Argument(help="The configuration file to validate"), @@ -320,7 +347,7 @@ def validate_config( @app.command() def remove_data( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), - config_file: Optional[str] = Option(None, help="The configuration file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), yes: bool = Option(False, "--yes", prompt="Are you sure?", help="Just remove, don't ask first"), ) -> None: """Truncate non-vocabulary tables in the destination schema.""" @@ -337,7 +364,7 @@ def remove_data( @app.command() def remove_vocab( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), - config_file: Optional[str] = Option(None, help="The configuration file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), yes: bool = Option(False, "--yes", prompt="Are you sure?", help="Just remove, don't ask first"), ) -> None: """Truncate vocabulary tables in the destination schema.""" @@ -355,7 +382,7 @@ def remove_vocab( @app.command() def remove_tables( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), - config_file: Optional[str] = Option(None, help="The configuration file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), yes: bool = Option(False, "--yes", prompt="Are you sure?", help="Just remove, don't ask first"), ) -> None: """Drop all tables in the destination schema. 
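
Since ``generate-config`` emits plain YAML with one entry per table, the file can also be adjusted programmatically rather than by hand before re-running ``make-generators``. A minimal sketch, assuming the default per-table keys written by ``generate_config_file`` and using the Pagila table names from the tutorial purely for illustration:

.. code-block:: python

    # Minimal sketch: bulk-edit a generated config.yaml.  The table names are
    # illustrative (Pagila); the keys match the defaults generate-config writes.
    from pathlib import Path

    import yaml

    config_path = Path("config.yaml")
    config = yaml.safe_load(config_path.read_text(encoding="utf-8"))

    for name, table in config.get("tables", {}).items():
        if name.startswith("payment_p"):
            # Partitions of the payment table: skip them entirely.
            table["ignore"] = True
        elif name in {"category", "city", "country", "language"}:
            # Reference data that is safe to copy verbatim.
            table["vocabulary_table"] = True

    config_path.write_text(yaml.dump(config), encoding="utf-8")
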
diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 4b1fb5f8..a2dc4346 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -756,6 +756,26 @@ def _generate_vocabulary_table( download_table(table, engine, yaml_file_name, compress) +def generate_config_file( + db_dsn: str, schema_name: Optional[str] +) -> str: + engine = get_sync_engine(create_db_engine(db_dsn, schema_name=schema_name)) + metadata = MetaData() + metadata.reflect(engine) + tables = {} + for table_name in metadata.tables.keys(): + table = { + "ignore": False, + "vocabulary_table": False, + "unions": {}, + "num_rows_per_pass": 1, + "row_generators": [], + "vocabulary_columns": [], + } + tables[table_name] = table + return yaml.dump({"tables": tables}) + + def make_tables_file( db_dsn: str, schema_name: Optional[str], config: Mapping[str, Any] ) -> str: From e9898cc231fcc0ff334a6ef684e5b340f5a28ae6 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 26 Feb 2025 17:48:08 +0000 Subject: [PATCH 31/85] Removed column uniquness, added postgres CIDR and BIT --- sqlsynthgen/serialize_metadata.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sqlsynthgen/serialize_metadata.py b/sqlsynthgen/serialize_metadata.py index 720120a2..ec037abd 100644 --- a/sqlsynthgen/serialize_metadata.py +++ b/sqlsynthgen/serialize_metadata.py @@ -94,8 +94,11 @@ def pgt_parser(): simple(sqltypes.BOOLEAN), simple(postgresql.TSVECTOR), simple(postgresql.BYTEA), + simple(postgresql.CIDR), numeric_type(sqltypes.NUMERIC), numeric_type(sqltypes.DECIMAL), + numeric_type(postgresql.BIT), + numeric_type(postgresql.REAL), string_type(sqltypes.CHAR), string_type(sqltypes.NCHAR), string_type(sqltypes.VARCHAR), @@ -128,7 +131,6 @@ def column_to_dict(column: Column, dialect: Dialect) -> str: "type": compiled, "primary": column.primary_key, "nullable": column.nullable, - "unique": column.unique, } foreign_keys = [str(fk.target_fullname) for fk in column.foreign_keys] if foreign_keys: @@ -159,7 +161,6 @@ def dict_to_column(table_name, col_name, rep: dict) -> Column: type_=type_, primary_key=rep.get("primary", False), nullable=rep.get("nullable", None), - unique=rep.get("unique", None), ) def dict_to_unique(rep: dict) -> schema.UniqueConstraint: From 4eb74655d780f4f36203da29a32a4e22444d48d8 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 27 Feb 2025 17:19:50 +0000 Subject: [PATCH 32/85] Documentation and minor fixes --- docs/source/configuration.rst | 5 + docs/source/index.rst | 7 - docs/source/installation.rst | 2 +- docs/source/introduction.rst | 206 +++++++++++++++++++++++++ sqlsynthgen/make.py | 6 +- sqlsynthgen/providers.py | 2 +- sqlsynthgen/templates/ssg.py.j2 | 2 +- tests/examples/example_orm.yaml | 265 ++++++++++++++++++++++++++++++++ 8 files changed, 484 insertions(+), 11 deletions(-) create mode 100644 tests/examples/example_orm.yaml diff --git a/docs/source/configuration.rst b/docs/source/configuration.rst index 76a97ab6..718039ea 100644 --- a/docs/source/configuration.rst +++ b/docs/source/configuration.rst @@ -4,6 +4,11 @@ Configuration Reference SqlSynthGen is configured using a YAML file, which is passed to several commands with the ``--config-file`` option. Throughout the docs, we will refer to this file as ``config.yaml`` but it can be called anything (the exception being that there will be a naming conflict if you have a vocabulary table called ``config``). 
+You can generate an example configuration file, based on your source database and filled with only default values (therefore you can safely delete any parts of the generated configuration file you don't need) like this: + +.. code-block:: shell + sqlsynthgen generate-config + Below, we see the schema for the configuration file. Note that our config file format includes a section of SmartNoise SQL metadata, which is explained more fully `here `_. diff --git a/docs/source/index.rst b/docs/source/index.rst index 05553b6a..5ff427f5 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -14,13 +14,6 @@ The latter also goes through some more advanced features of SSG and how to use t This project will be under active development from Jan - Oct 2023 - -.. note:: - - We do not currently support tables without primary keys. - If you have tables without primary keys, some sqlsynthgen functionality - may work but vocabulary tables will not. - Contents: --------- diff --git a/docs/source/installation.rst b/docs/source/installation.rst index 1c64804f..7e7e56c8 100644 --- a/docs/source/installation.rst +++ b/docs/source/installation.rst @@ -7,7 +7,7 @@ To use SqlSynthGen, first install it: .. code-block:: console - $ pip install sqlsynthgen + $ pipx install git+https://github.com/tim-band/sqlsynthgen Check that you can view the help message with: diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst index 9fd3408e..2308c3b6 100644 --- a/docs/source/introduction.rst +++ b/docs/source/introduction.rst @@ -3,6 +3,212 @@ Introductory Tutorial ============================== +Let us begin with a simple movie rental database called `Pagila `_. Follow the instructions there to create a PostgreSQL database if you want to follow this tutorial along. +Pagila is already fake data, but we shall pretend that it has sensitive data in it, and we are attempting to keep this data secure. +We will imagine we have a strict protocol to follow to keep the data safe, and that the source database is only accessible from a private network. + +You can give access to this database to a different user (I'm using ``tim``) like this: + +.. code-block:: console + + $ sudo -u postgres psql pagila + pagila=# grand pg_read_all_data to tim; + pagila=# \q + +Minimal example +--------------- + +Let us begin in the private network that this sensitive data resides in (well, let us pretend anyway). + +We being by setting the database connection information +(you don't need to set ``SRC_SCHEMA`` if the schema is the default, but for explicitness we do here), +and creating the configuration, ORM and initial statistics files +(here we are imagining the username is ``postgres`` and the password is ``password`` -- change ``postgres:password`` to the username and password you used to set up the database): + +.. code-block:: shell + + export SRC_DSN='postgresql://postgres:password@localhost/pagila' + export SRC_SCHEMA='public' + sqlsynthgen generate-config + sqlsynthgen make-tables + sqlsynthgen make-stats + +This generates the files ``config.yaml``, ``orm.yaml`` and ``src-stats.yaml``. + +Now we examine these files for evidence of sensitive information. +There should be none, but any lines that are considered sensitive can be removed +(as long as the file remains a YAML file!) before taking these files out of the private network. + +Now outside of the private network we have these three files, and we can generate a new database. +Let us first create a new database within PostgreSQL. 
+Here we are using user ``tim`` and the default schema ``public``: + +.. code-block:: console + + $ sudo -u postgres psql + postgres=# create database fake_pagila; + CREATE DATABASE + postgres=# grant all privileges on database fake_pagila to tim; + GRANT + postgres=# exit + $ sudo -u postgres psql fake_pagila + fake_pagila=# grant all privileges on schema public to tim; + GRANT + fake_pagila=# exit + +And let's populate it with the fake data: + +.. code-block:: shell + + export DST_DSN='postgresql://tim:password@localhost/fake_pagila' + export DST_SCHEMA='public' + sqlsynthgen make-generators + sqlsynthgen create-tables + sqlsynthgen create-data + +``make-generators`` creates a Python file called ``ssg.py``. +You can edit this file if you want, but it is much easier to edit ``config.yaml`` and call ``sqlsynthgen make-generators --force`` to regenerate this file. + +You will notice that ``create-tables`` produces a couple of warnings, and PostgreSQL complains when ``sqlsynthgen`` tries to create the data. +The warnings are that ``sqlsynthgen`` doesn't understand the special PostgresSQL types ``TSVECTOR`` and ``ARRAY``, so it doesn't know how to generate data for those columns. +Because it doesn't know how to generate data for those columns it will just use NULLs, and the ``film.fulltext`` column cannot be NULL, so creating the data fails. + +Fixing the errors with the minimal example +------------------------------------------ + +Now let us add text to the ``film.fulltext`` column. Find the ``film`` section and alter it like so: + +.. code-block:: yaml + + film: + row_generators: + - name: generic.text.text + columns_assigned: fulltext + +Also, while we are at it let's give the actors sensible names: + +.. code-block:: yaml + + actor: + row_generators: + - name: generic.person.first_name + columns_assigned: first_name + - name: generic.person.last_name + columns_assigned: last_name + +We can see that we are setting the column we want changed with the ``columns_assigned`` property, but what does this ``name`` property mean? +This is a Python function that generates the random data for us. +``generic.`` refers to the Mimesis `Generic provider `_ that combines all the other Mimesis providers. +These all use the ``EN_GB`` locale, which currently cannot be changed. +Some examples of useful providers you can use are: +- `generic.text. `_ generates words, sentences, colours and more. +- `generic.datetime. `_ generates dates, day names, times and so on. +- `generic.person. `_ generates first and last names, genders, heights, occupations and so on. + +Some of these functions take arguments, that we can assign like this: + +.. code-block:: yaml + + customer: + row_generators: + - name: generic.person.email + kwargs: + domains: + - gmail.com + - ucl.ac.uk + unique: true + columns_assigned: email + +(but only static booleans, strings or numbers) + +Anyway, we now need to remake the generators (``make-generators``) and re-run them (``create-data``): + +.. code-block:: console + $ sqlsynthgen make-generators --force + $ sqlsynthgen create-data --num-passes 15 + +Now you can use ``psql --username tim fake_pagila`` to explore the data. + +You will see that almost all of the columns have correctly-typed data in it. 
+All the foreign keys point to existing rows in the correct table without our having to do anything, +but also our nice new generators are working: +Our ``actor`` table has nice names in it, and our ``film`` table has text in the ``fulltext`` column +(albeit text that does not seem to describe films). + +Problems with the minimal example +--------------------------------- + +But here is a non-exhaustive list of issues with the data produced: + +- all text fields are just colours, for example: + - staff names (we can deal with this the same way we dealt with actors names above). + - address lines. + - movie categories. + - city, country and language names. +- there are a lot of payment tables that are partitions of the + main payment table in the source database, but these are + just different tables in the generated table. + +Fixing the problems with the minimal example #1: ignoring unwanted tables +------------------------------------------------------------------------- + +We fix these problems by adjusting the ``config.yaml`` file. +We do not need to go back to the private network. +First, let us remove all the ``payment_`` tables. +This lowers the fidelity of the generated database, but ``sqlsynthgen`` cannot cope with partitioned tables +so the best that we can do is pretend that ``payment`` is not a partitioned table. +If we think that our users will not be interested in this implementation detail then this will be acceptable. +So we edit the appropriate parts of the ``config.yaml`` file. You will see seven sections that look like this: + +.. code-block:: yaml + + payment_p2022_01: + ignore: false + num_rows_per_pass: 1 + row_generators: [] + unions: {} + vocabulary_columns: [] + vocabulary_table: false + +We need to change ``ignore: false`` to ``ignore: true``, and we can delete the other lines in these blocks if we like: + +.. code-block:: yaml + + payment_p2022_01: + ignore: true + payment_p2022_02: + ignore: true + payment_p2022_03: + ignore: true + payment_p2022_04: + ignore: true + payment_p2022_05: + ignore: true + payment_p2022_06: + ignore: true + payment_p2022_07: + ignore: true + +Now we can destroy the existing database and try again: + +.. code-block:: shell + + sqlsynthgen remove-tables --yes + sqlsynthgen create-tables + sqlsynthgen create-data + +We don't need to regenerate the generators this time as we have not changed anything in the ``config.yaml`` file that affects generators. + +Fixing the problems with the minimal example #2: generate vocabularies +---------------------------------------------------------------------- + +While we could try to generate random plausible language, country, city and film category names, there is a better way. +As these tables hold no sensitive data, we can just copy them. +To do this, we need to change the ``config.yaml`` file and go back to the private network. +... + +More In-Depth Tutorial +====================== `SqlSynthGen `_, or SSG for short, is a software package for synthetic data generation, focussed on relational data. When pointed to an existing relational database, SSG creates another database with the same database schema, and populates it with synthetic data. diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index a2dc4346..0a527df7 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -652,7 +652,10 @@ def make_table_generators( # pylint: disable=too-many-locals generated (by make-tables and make-vocab). Args: + metadata: database ORM config: Configuration to control the generator creation. 
+ orm_filename: "orm.yaml" file path so that the generator file can load the MetaData object + config_filename: "config.yaml" file path so that the generator file can load the MetaData object src_stats_filename: A filename for where to read src stats from. Optional, if `None` this feature will be skipped overwrite_files: Whether to overwrite pre-existing vocabulary files @@ -700,7 +703,7 @@ def make_table_generators( # pylint: disable=too-many-locals { "provider_imports": PROVIDER_IMPORTS, "orm_file_name": orm_filename, - "config_file_name": repr(config_filename), + "config_file_name": config_filename, "row_generator_module_name": row_generator_module_name, "story_generator_module_name": story_generator_module_name, "src_stats_filename": src_stats_filename, @@ -919,6 +922,7 @@ async def make_src_stats( Args: dsn: database connection string config: a dictionary with the necessary configuration + metadata: the database ORM schema_name: name of the database schema Returns: diff --git a/sqlsynthgen/providers.py b/sqlsynthgen/providers.py index 6b26ab5d..54afd529 100644 --- a/sqlsynthgen/providers.py +++ b/sqlsynthgen/providers.py @@ -39,7 +39,7 @@ def increment(self, db_connection: Connection, column: Column) -> int: result = self.accumulators.get(name, None) if result == None: row = db_connection.execute(select(func.max(column))).first() - result = 1 if row is None else row[0] + result = 0 if row is None or row[0] is None else row[0] value = result + 1 self.accumulators[name] = value return value diff --git a/sqlsynthgen/templates/ssg.py.j2 b/sqlsynthgen/templates/ssg.py.j2 index 0b0382a3..1d5bff20 100644 --- a/sqlsynthgen/templates/ssg.py.j2 +++ b/sqlsynthgen/templates/ssg.py.j2 @@ -19,7 +19,7 @@ from sqlsynthgen.providers import ( generic.add_provider({{ provider_import }}) {% endfor %} -metadata = load_metadata("{{ orm_file_name }}", {{ config_file_name }}) +metadata = load_metadata("{{ orm_file_name }}", "{{ config_file_name }}") {% if row_generator_module_name is not none %} import {{ row_generator_module_name }} diff --git a/tests/examples/example_orm.yaml b/tests/examples/example_orm.yaml new file mode 100644 index 00000000..461e3c54 --- /dev/null +++ b/tests/examples/example_orm.yaml @@ -0,0 +1,265 @@ +dsn: postgresql://tim:tim@localhost/src +schema: null +tables: + concept: + columns: + concept_id: + nullable: false + primary: true + type: INTEGER + concept_name: + nullable: false + primary: false + type: TEXT + concept_type_id: + foreign_keys: + - concept_type.id + nullable: true + primary: false + type: INTEGER + concept_valid_from: + nullable: false + primary: false + type: TIMESTAMP WITH TIME ZONE + schema: null + unique: + - columns: + - concept_name + name: concept_name_uniq + concept_type: + columns: + id: + nullable: false + primary: true + type: INTEGER + lucky_number: + nullable: true + primary: false + type: INTEGER + mitigation_type_id: + foreign_keys: + - mitigation_type.id + nullable: true + primary: false + type: INTEGER + name: + nullable: false + primary: false + type: TEXT + schema: null + unique: [] + data_type_test: + columns: + myuuid: + nullable: false + primary: false + type: UUID + schema: null + unique: [] + empty_vocabulary: + columns: + entry_id: + nullable: false + primary: true + type: INTEGER + entry_name: + nullable: false + primary: false + type: TEXT + schema: null + unique: [] + hospital_visit: + columns: + hospital_visit_id: + nullable: false + primary: true + type: BIGINT + person_id: + foreign_keys: + - person.person_id + nullable: false + 
primary: false + type: INTEGER + visit_duration_seconds: + nullable: false + primary: false + type: REAL + visit_end: + nullable: false + primary: false + type: DATE + visit_image: + nullable: false + primary: false + type: BYTEA + visit_start: + nullable: false + primary: false + type: DATE + visit_type_concept_id: + foreign_keys: + - concept.concept_id + nullable: false + primary: false + type: INTEGER + schema: null + unique: [] + mitigation_type: + columns: + description: + nullable: true + primary: false + type: TEXT + id: + nullable: false + primary: true + type: INTEGER + name: + nullable: true + primary: false + type: TEXT + schema: null + unique: [] + no_pk_test: + columns: + not_an_id: + nullable: false + primary: false + type: INTEGER + schema: null + unique: [] + person: + columns: + name: + nullable: false + primary: false + type: TEXT + person_id: + nullable: false + primary: true + type: INTEGER + research_opt_out: + nullable: false + primary: false + type: BOOLEAN + stored_from: + nullable: false + primary: false + type: TIMESTAMP WITH TIME ZONE + schema: null + unique: [] + ref_to_unignorable_table: + columns: + id: + nullable: false + primary: true + type: INTEGER + ref: + foreign_keys: + - unignorable_table.id + nullable: false + primary: false + type: INTEGER + schema: null + unique: [] + strange_type_table: + columns: + column_with_unusual_type: + nullable: true + primary: false + type: CIDR + column_with_unusual_type_and_length: + nullable: true + primary: false + type: BIT(3) + id: + nullable: false + primary: true + type: INTEGER + schema: null + unique: [] + table_to_be_ignored: + columns: + id: + nullable: false + primary: false + type: INTEGER + schema: null + unique: [] + test_entity: + columns: + single_letter_column: + nullable: true + primary: false + type: VARCHAR(1) + vocabulary_entry_id: + foreign_keys: + - empty_vocabulary.entry_id + nullable: true + primary: false + type: INTEGER + schema: null + unique: [] + unignorable_table: + columns: + id: + nullable: false + primary: true + type: INTEGER + schema: null + unique: [] + unique_constraint_test: + columns: + a: + nullable: false + primary: false + type: BOOLEAN + b: + nullable: false + primary: false + type: BOOLEAN + c: + nullable: false + primary: false + type: TEXT + id: + nullable: false + primary: true + type: INTEGER + schema: null + unique: + - columns: + - c + name: c_uniq + - columns: + - a + - b + name: ab_uniq + unique_constraint_test2: + columns: + a: + nullable: false + primary: false + type: TEXT + b: + nullable: false + primary: false + type: TEXT + c: + nullable: false + primary: false + type: TEXT + id: + nullable: false + primary: true + type: INTEGER + schema: null + unique: + - columns: + - a + - b + - c + name: abc_uniq2 + - columns: + - a + name: a_uniq2 From d28cb4639d2d97dfd2a5ea71cca9a9ac6dd43f18 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Mon, 3 Mar 2025 18:11:56 +0000 Subject: [PATCH 33/85] create-vocab can load .yaml.gz --- docs/source/introduction.rst | 43 ++++++++++++++++++++++++++++++- sqlsynthgen/base.py | 49 +++++++++++++++++++++++------------- sqlsynthgen/main.py | 7 ++++-- 3 files changed, 79 insertions(+), 20 deletions(-) diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst index 2308c3b6..5ff6b944 100644 --- a/docs/source/introduction.rst +++ b/docs/source/introduction.rst @@ -205,7 +205,48 @@ Fixing the problems with the minimal example #2: generate vocabularies While we could try to generate random plausible language, country, city and film 
category names, there is a better way. As these tables hold no sensitive data, we can just copy them. To do this, we need to change the ``config.yaml`` file and go back to the private network. -... + +So let us find these sections in ``config.yaml`` and change ``vocabulary_table: false`` to ``vocabulary_table:true`` +(deleting the other properties if you like): + +.. code-block:: yaml + category: + vocabulary_table: true + city: + vocabulary_table: true + country: + vocabulary_table: true + +and later (although it doesn't matter if you re-arrange the table blocks): + +.. code-block:: yaml + language: + vocabulary_table: true + +and now we take this file into the private network (or pretend to) and run (in the private network with ``SRC_DSN`` and ``SRC_SCHEMA`` set as above): + +.. code-block:: console + + $ sqlsynthgen make-vocab --compress + +This will produce four files: ``category.yaml.gz``, ``city.yaml.gz``, ``country.yaml.gz`` and ``language.yaml.gz``. +If the ``--compress`` option is not passed it will produce ``.yaml`` files instead of ``.yaml.gz`` and this would be fine in this case. +Certain databases have very large vocabulary tables, for example the ``concept`` table in OMOP databases. +Such huge YAML files can cause problems, but they compress very well, so the ``--compress`` option can be very useful for overcoming such limitations. +Generating these huge vocabulary files can nevertheless take a very long time! Not in Pagila's case, though. + +Now your data privacy protocols will either require you to unzip and examine these files before taking them out of the private network +or it will trust ``sqlsynthgen`` to produce only non-private output given certain inputs. +In either case we take these files out of the private network. + +Using the same ``config.yaml`` file outside the private network (and with ``DST_DSN`` set as above) we delete the existing data in these vocabulary tables, +and fill them with the new data from the ``yaml.gz`` (or unzipped ``.yaml``) files: + +.. code-block:: console + + $ sqlsynthgen remove-vocab + Are you sure? [y/N]: y + $ sqlsynthgen create-vocab More In-Depth Tutorial ====================== diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index 49682262..b59b734c 100644 --- a/sqlsynthgen/base.py +++ b/sqlsynthgen/base.py @@ -1,5 +1,6 @@ """Base table generator classes.""" from abc import ABC, abstractmethod +from collections.abc import Callable from dataclasses import dataclass import numpy import os @@ -7,6 +8,7 @@ from typing import Any import yaml +import gzip from sqlalchemy import Connection, insert from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.schema import Table @@ -15,6 +17,7 @@ logger, stream_yaml, MAKE_VOCAB_PROGRESS_REPORT_EVERY, + table_row_count, ) def zipf_weights(size): @@ -66,29 +69,41 @@ class FileUploader: table: Table + def _load_existing_file(self, connection: Connection, file_size: int, opener: Callable[[], Any]) -> None: + count = 0 + with opener() as fh: + rows = stream_yaml(fh) + for row in rows: + stmt = insert(self.table).values(row) + connection.execute(stmt) + connection.commit() + count += 1 + if count % MAKE_VOCAB_PROGRESS_REPORT_EVERY == 0: + logger.info( + "inserted row %d of %s, %.1f%%", + count, + self.table.name, + 100 * fh.tell() / file_size, + ) + def load(self, connection: Connection) -> None: """Load the data from file.""" yaml_file = Path(self.table.fullname + ".yaml") - if not yaml_file.exists(): - logger.warning("File %s not found. 
Skipping...", yaml_file) + if yaml_file.exists(): + opener = lambda: open(yaml_file, mode="r", encoding="utf-8") + else: + yaml_file = Path(self.table.fullname + ".yaml.gz") + if yaml_file.exists(): + opener = lambda: gzip.open(yaml_file, mode="rt") + else: + logger.warning("File %s not found. Skipping...", yaml_file) + return + if 0 < table_row_count(self.table, connection): + logger.warning("Table %s already contains data (consider running 'sqlsynthgen remove-vocab'), skipping...", self.table.name) return try: file_size = os.path.getsize(yaml_file) - count = 0 - with open(yaml_file, "r", encoding="utf-8") as fh: - rows = stream_yaml(fh) - for row in rows: - stmt = insert(self.table).values(row) - connection.execute(stmt) - connection.commit() - count += 1 - if count % MAKE_VOCAB_PROGRESS_REPORT_EVERY == 0: - logger.info( - "inserted row %d of %s, %.1f%%", - count, - self.table.name, - 100 * fh.tell() / file_size, - ) + self._load_existing_file(connection, file_size, opener) except yaml.YAMLError as e: logger.warning("Error reading YAML file %s: %s", yaml_file, e) return diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 102fc6e6..93ba3b0b 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -402,8 +402,11 @@ def remove_tables( @app.command() def version() -> None: """Display version information.""" - conf_logger(True) - logger.info("sqlsynthgen version %s", metadata.version(__package__)) + logger.info( + "%s version %s", + __package__, + metadata.version(__package__), + ) if __name__ == "__main__": From d69971016b49b79569e5406f010dc6ca8cbf508c Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 6 Mar 2025 18:18:37 +0000 Subject: [PATCH 34/85] Make DB tests use temporary database --- tests/test_base.py | 12 +++++------- tests/test_create.py | 12 +++++------- tests/test_functional.py | 6 ++++-- tests/test_make.py | 11 ++++++----- tests/test_providers.py | 10 +++------- tests/test_unique_generator.py | 8 +++----- tests/test_utils.py | 13 +++++-------- tests/utils.py | 19 +++++++++++++++++-- 8 files changed, 48 insertions(+), 43 deletions(-) diff --git a/tests/test_base.py b/tests/test_base.py index e24b7354..4785f82b 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -2,11 +2,11 @@ import os from pathlib import Path -from sqlalchemy import Column, Integer, create_engine, select +from sqlalchemy import Column, Integer, select from sqlalchemy.orm import declarative_base from sqlsynthgen.base import FileUploader -from tests.utils import RequiresDBTestCase, run_psql +from tests.utils import RequiresDBTestCase # pylint: disable=invalid-name Base = declarative_base() @@ -32,17 +32,15 @@ class VocabTests(RequiresDBTestCase): def setUp(self) -> None: """Pre-test setup.""" + super().setUp() + self.run_psql(Path("tests/examples/providers.dump")) - run_psql(Path("tests/examples/providers.dump")) - - self.engine = create_engine( - "postgresql://postgres:password@localhost:5432/providers" - ) metadata.create_all(self.engine) os.chdir(self.test_dir) def tearDown(self) -> None: os.chdir(self.start_dir) + super().tearDown() def test_load(self) -> None: """Test the load method.""" diff --git a/tests/test_create.py b/tests/test_create.py index 1e713af1..bbeb6a9f 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -18,7 +18,7 @@ create_db_vocab, populate, ) -from tests.utils import RequiresDBTestCase, SSGTestCase, get_test_settings, run_psql +from tests.utils import RequiresDBTestCase, SSGTestCase, get_test_settings class MyTestCase(SSGTestCase): @@ -206,16 +206,14 @@ 
class ColumnDefaultsTable(Base): # type: ignore def setUp(self) -> None: """Ensure we have an empty DB to work with.""" + super().setUp() dump_file_path = Path("dst.dump") examples_dir = Path("tests/examples") - run_psql(examples_dir / dump_file_path) + self.run_psql(examples_dir / dump_file_path) def test_populate(self) -> None: """Check that we can populate a table that has column defaults.""" - engine = create_engine( - "postgresql://postgres:password@localhost:5432/dst", - ) - self.metadata.create_all(engine) + self.metadata.create_all(self.engine) def my_story() -> Story: """A story generator.""" @@ -223,6 +221,6 @@ def my_story() -> Story: self.assertEqual(1, first_row["someval"]) self.assertEqual(8, first_row["otherval"]) - with engine.connect() as conn: + with self.engine.connect() as conn: with conn.begin(): _populate_story(my_story(), dict(self.metadata.tables), {}, conn) diff --git a/tests/test_functional.py b/tests/test_functional.py index 217c1305..3311bba6 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -6,7 +6,7 @@ from sqlalchemy import create_engine, inspect -from tests.utils import RequiresDBTestCase, run_psql +from tests.utils import RequiresDBTestCase # pylint: disable=subprocess-run-check @@ -62,8 +62,9 @@ class DBFunctionalTestCase(RequiresDBTestCase): def setUp(self) -> None: """Pre-test setup.""" + super().setUp() # Create a mostly-blank destination database - run_psql(self.examples_dir / self.dump_file_path) + self.run_psql(self.examples_dir / self.dump_file_path) # Copy some of the example files over to the workspace. for file in self.generator_file_paths + (self.config_file_path,): @@ -79,6 +80,7 @@ def setUp(self) -> None: def tearDown(self) -> None: os.chdir(self.start_dir) + super().tearDown() def test_workflow_minimal_args(self) -> None: """Test the recommended CLI workflow runs without errors.""" diff --git a/tests/test_make.py b/tests/test_make.py index c57a78fb..41468103 100644 --- a/tests/test_make.py +++ b/tests/test_make.py @@ -256,8 +256,8 @@ class TestMakeStats(RequiresDBTestCase): def setUp(self) -> None: """Pre-test setup.""" + super().setUp() os.chdir(self.test_dir) - self.connection_string = "postgresql://postgres:password@localhost:5432/src" conf_path = Path("example_config.yaml") with open(conf_path, "r", encoding="utf8") as f: self.config = yaml.safe_load(f) @@ -265,6 +265,7 @@ def setUp(self) -> None: def tearDown(self) -> None: """Post-test cleanup.""" os.chdir(self.start_dir) + super().tearDown() def check_make_stats_output(self, src_stats: dict) -> None: """Check that the output of make_src_stats is as expected.""" @@ -294,14 +295,14 @@ def check_make_stats_output(self, src_stats: dict) -> None: def test_make_stats_no_asyncio_schema(self) -> None: """Test that make_src_stats works when explicitly naming a schema.""" src_stats = asyncio.get_event_loop().run_until_complete( - make_src_stats(self.connection_string, self.config, "public") + make_src_stats(self.postgresql.url(), self.config, "public") ) self.check_make_stats_output(src_stats) def test_make_stats_no_asyncio(self) -> None: """Test that make_src_stats works using the example configuration.""" src_stats = asyncio.get_event_loop().run_until_complete( - make_src_stats(self.connection_string, self.config) + make_src_stats(self.postgresql.url(), self.config) ) self.check_make_stats_output(src_stats) @@ -311,7 +312,7 @@ def test_make_stats_asyncio(self) -> None: """ config_asyncio = {**self.config, "use-asyncio": True} src_stats = 
asyncio.get_event_loop().run_until_complete( - make_src_stats(self.connection_string, config_asyncio) + make_src_stats(self.postgresql.url(), config_asyncio) ) self.check_make_stats_output(src_stats) @@ -342,7 +343,7 @@ def test_make_stats_empty_result(self, mock_logger: MagicMock) -> None: ] } src_stats = asyncio.get_event_loop().run_until_complete( - make_src_stats(self.connection_string, config, "public") + make_src_stats(self.postgresql.url(), config, "public") ) self.assertEqual(src_stats[query_name1], []) self.assertEqual(src_stats[query_name2], []) diff --git a/tests/test_providers.py b/tests/test_providers.py index 30c53c94..d6ca85af 100644 --- a/tests/test_providers.py +++ b/tests/test_providers.py @@ -7,7 +7,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlsynthgen import providers -from tests.utils import RequiresDBTestCase, SSGTestCase, run_psql +from tests.utils import RequiresDBTestCase, SSGTestCase # pylint: disable=invalid-name Base = declarative_base() @@ -40,12 +40,8 @@ class ColumnValueProviderTestCase(RequiresDBTestCase): def setUp(self) -> None: """Pre-test setup.""" - - run_psql(Path("tests/examples/providers.dump")) - - self.engine = create_engine( - "postgresql://postgres:password@localhost:5432/providers", - ) + super().setUp() + self.run_psql(Path("tests/examples/providers.dump")) metadata.create_all(self.engine) def test_column_value_present(self) -> None: diff --git a/tests/test_unique_generator.py b/tests/test_unique_generator.py index 4215ec0a..5f56a6b4 100644 --- a/tests/test_unique_generator.py +++ b/tests/test_unique_generator.py @@ -14,7 +14,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlsynthgen.unique_generator import UniqueGenerator -from tests.utils import RequiresDBTestCase, run_psql +from tests.utils import RequiresDBTestCase # pylint: disable=invalid-name Base = declarative_base() @@ -43,10 +43,8 @@ class UniqueGeneratorTestCase(RequiresDBTestCase): def setUp(self) -> None: """Pre-test setup.""" - run_psql(Path("tests/examples/unique_generator.dump")) - self.engine = create_engine( - "postgresql://postgres:password@localhost:5432/unique_generator_test", - ) + super().setUp() + self.run_psql(Path("tests/examples/unique_generator.dump")) metadata.create_all(self.engine) def test_unique_generator_empty_table(self) -> None: diff --git a/tests/test_utils.py b/tests/test_utils.py index d1455fff..2c34c811 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -15,7 +15,7 @@ import_file, read_config_file, ) -from tests.utils import RequiresDBTestCase, SSGTestCase, run_psql +from tests.utils import RequiresDBTestCase, SSGTestCase # pylint: disable=invalid-name Base = declarative_base() @@ -66,13 +66,9 @@ class TestDownload(RequiresDBTestCase): def setUp(self) -> None: """Pre-test setup.""" + super().setUp() - run_psql(Path("tests/examples/providers.dump")) - - self.engine = create_engine( - "postgresql://postgres:password@localhost:5432/providers", - connect_args={"connect_timeout": 10}, - ) + self.run_psql(Path("tests/examples/providers.dump")) metadata.create_all(self.engine) os.chdir(self.test_dir) @@ -81,6 +77,7 @@ def setUp(self) -> None: def tearDown(self) -> None: """Post-test cleanup.""" os.chdir(self.start_dir) + super().tearDown() def test_download_table(self) -> None: """Test the download_table function.""" @@ -90,7 +87,7 @@ def test_download_table(self) -> None: conn.execute(insert(MyTable).values({"id": 1})) conn.commit() - download_table(MyTable.__table__, self.engine, self.mytable_file_path) + 
download_table(MyTable.__table__, self.engine, self.mytable_file_path, compress=False) # The .strip() gets rid of any possible empty lines at the end of the file. with Path("../examples/expected.yaml").open(encoding="utf-8") as yamlfile: diff --git a/tests/utils.py b/tests/utils.py index f0182912..018c2dfd 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -2,7 +2,9 @@ import os from functools import lru_cache from pathlib import Path +from sqlalchemy import create_engine from subprocess import run +import testing.postgresql from typing import Any from unittest import TestCase, skipUnless @@ -26,7 +28,7 @@ def get_test_settings() -> settings.Settings: ) -def run_psql(dump_file: Path) -> None: +def run_psql(dump_file: Path, dsn: str="host=localhost port=5432 user=postgres") -> None: """Run psql and pass dump_file_name as the --file option.""" # If you need to update a .dump file, use @@ -46,7 +48,7 @@ def run_psql(dump_file: Path) -> None: # Clear and re-create the test database completed_process = run( - ["psql", "--host=localhost", "--username=postgres", f"--file={dump_file}"], + ["psql", "-d", dsn, f"--file={dump_file}"], capture_output=True, env=env, check=True, @@ -85,3 +87,16 @@ def assertFailure(self, result: Any) -> None: # pylint: disable=invalid-name @skipUnless(os.environ.get("REQUIRES_DB") == "1", "Set 'REQUIRES_DB=1' to enable.") class RequiresDBTestCase(SSGTestCase): """A test case that only runs if REQUIRES_DB has been set to 1.""" + def setUp(self) -> None: + super().setUp() + self.postgresql = testing.postgresql.Postgresql() + self.engine = create_engine( + self.postgresql.url(), + ) + + def tearDown(self) -> None: + self.postgresql.stop() + super().tearDown() + + def run_psql(self, file_path: Path) -> None: + run_psql(file_path, self.postgresql.url()) From 32f66aa9398c0fb2cd05993d897d118ded4e504e Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 7 Mar 2025 13:20:07 +0000 Subject: [PATCH 35/85] factored out tests loading .dump files and fixed RstTests.test_dir --- docs/source/introduction.rst | 2 +- tests/test_base.py | 2 +- tests/test_create.py | 8 +--- tests/test_functional.py | 2 - tests/test_providers.py | 2 +- tests/test_rst.py | 32 ++++++--------- tests/test_unique_generator.py | 2 +- tests/test_utils.py | 24 +---------- tests/utils.py | 73 +++++++++++++++++----------------- 9 files changed, 56 insertions(+), 91 deletions(-) diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst index 5ff6b944..a522b97c 100644 --- a/docs/source/introduction.rst +++ b/docs/source/introduction.rst @@ -98,7 +98,7 @@ Also, while we are at it let's give the actors sensible names: We can see that we are setting the column we want changed with the ``columns_assigned`` property, but what does this ``name`` property mean? This is a Python function that generates the random data for us. -``generic.`` refers to the Mimesis `Generic provider `_ that combines all the other Mimesis providers. +``generic.`` refers to the `Mimesis generic provider `_ that combines all the other Mimesis providers. These all use the ``EN_GB`` locale, which currently cannot be changed. Some examples of useful providers you can use are: - `generic.text. `_ generates words, sentences, colours and more. 
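
The test changes in this patch all rely on the same pattern: each test case spins up a throwaway PostgreSQL instance instead of assuming a shared local server. A minimal sketch of that pattern outside the unittest machinery, assuming the ``testing.postgresql`` package, a local PostgreSQL installation and a working SQLAlchemy driver are available:

.. code-block:: python

    # Minimal sketch of the temporary-database pattern used by RequiresDBTestCase.
    from sqlalchemy import create_engine, text
    import testing.postgresql

    with testing.postgresql.Postgresql() as postgresql:
        # Each instance gets its own data directory and port, so tests cannot
        # interfere with (or depend on) a developer's local databases.
        engine = create_engine(postgresql.url())
        with engine.connect() as connection:
            print(connection.execute(text("SELECT version()")).scalar())
        engine.dispose()
    # The server is stopped and its data directory deleted on exit.
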
diff --git a/tests/test_base.py b/tests/test_base.py index 4785f82b..fbb85186 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -27,13 +27,13 @@ class BaseTable(Base): # type: ignore class VocabTests(RequiresDBTestCase): """Module test case.""" + dump_file_path = "providers.dump" test_dir = Path("tests/examples") start_dir = os.getcwd() def setUp(self) -> None: """Pre-test setup.""" super().setUp() - self.run_psql(Path("tests/examples/providers.dump")) metadata.create_all(self.engine) os.chdir(self.test_dir) diff --git a/tests/test_create.py b/tests/test_create.py index bbeb6a9f..5005758a 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -192,6 +192,7 @@ def test_create_db_vocab( class TestStoryDefaults(RequiresDBTestCase): """Test that we can handle column defaults in stories.""" + dump_file_path = "dst.dump" # pylint: disable=invalid-name Base = declarative_base() # pylint: enable=invalid-name @@ -204,13 +205,6 @@ class ColumnDefaultsTable(Base): # type: ignore someval = Column(Integer, primary_key=True) otherval = Column(Integer, server_default="8") - def setUp(self) -> None: - """Ensure we have an empty DB to work with.""" - super().setUp() - dump_file_path = Path("dst.dump") - examples_dir = Path("tests/examples") - self.run_psql(examples_dir / dump_file_path) - def test_populate(self) -> None: """Check that we can populate a table that has column defaults.""" self.metadata.create_all(self.engine) diff --git a/tests/test_functional.py b/tests/test_functional.py index 3311bba6..a9079f58 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -63,8 +63,6 @@ class DBFunctionalTestCase(RequiresDBTestCase): def setUp(self) -> None: """Pre-test setup.""" super().setUp() - # Create a mostly-blank destination database - self.run_psql(self.examples_dir / self.dump_file_path) # Copy some of the example files over to the workspace. 
for file in self.generator_file_paths + (self.config_file_path,): diff --git a/tests/test_providers.py b/tests/test_providers.py index d6ca85af..1ea3f118 100644 --- a/tests/test_providers.py +++ b/tests/test_providers.py @@ -37,11 +37,11 @@ def test_bytes(self) -> None: class ColumnValueProviderTestCase(RequiresDBTestCase): """Tests for the ColumnValueProvider class.""" + dump_file_path = "providers.dump" def setUp(self) -> None: """Pre-test setup.""" super().setUp() - self.run_psql(Path("tests/examples/providers.dump")) metadata.create_all(self.engine) def test_column_value_present(self) -> None: diff --git a/tests/test_rst.py b/tests/test_rst.py index ecb892b3..090658c1 100644 --- a/tests/test_rst.py +++ b/tests/test_rst.py @@ -34,23 +34,17 @@ def test_dir(self) -> None: 'Hyperlink target "page-installation" is not referenced.', 'Hyperlink target "story-generators" is not referenced.', ] - filtered_errors = [] - for file_errors in all_errors: - for file_error in file_errors: - skip = False - - error_message = file_error.full_message - - for allowed_error in allowed_errors: - if allowed_error in error_message: - skip = True - break - - if skip: - continue - - filtered_errors.append(file_error) + filtered_errors = [ + file_error + for file_errors in all_errors + for file_error in file_errors + # Only worry about ERRORs and WARNINGs + if file_error.level <= 2 + if not any(filter(lambda m: m in file_error.full_message, allowed_errors)) + ] - # Only worry about ERRORs and WARNINGs - level_one_errors = [x.full_message for x in filtered_errors if x.level <= 2] - self.assertListEqual([], level_one_errors) + if filtered_errors: + self.fail(msg="\n".join([ + f"{err.source}({err.line}): {["Severe", "Error", "Warning"][err.level]}: {err.full_message}" + for err in filtered_errors + ])) diff --git a/tests/test_unique_generator.py b/tests/test_unique_generator.py index 5f56a6b4..0e114337 100644 --- a/tests/test_unique_generator.py +++ b/tests/test_unique_generator.py @@ -40,11 +40,11 @@ class UniqueGeneratorTestCase(RequiresDBTestCase): and b which are boolean, and c which is a text column. There is a joint unique constraint on a and b, and a separate unique constraint on c. 
""" + dump_file_path = "unique_generator.dump" def setUp(self) -> None: """Pre-test setup.""" super().setUp() - self.run_psql(Path("tests/examples/unique_generator.dump")) metadata.create_all(self.engine) def test_unique_generator_empty_table(self) -> None: diff --git a/tests/test_utils.py b/tests/test_utils.py index 2c34c811..245f31bc 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -59,6 +59,7 @@ def test_import_file(self) -> None: class TestDownload(RequiresDBTestCase): """Tests for the download_table function.""" + dump_file_path = "providers.dump" mytable_file_path = Path("mytable.yaml") test_dir = Path("tests/workspace") @@ -68,7 +69,6 @@ def setUp(self) -> None: """Pre-test setup.""" super().setUp() - self.run_psql(Path("tests/examples/providers.dump")) metadata.create_all(self.engine) os.chdir(self.test_dir) @@ -99,28 +99,6 @@ def test_download_table(self) -> None: self.assertEqual(expected, actual) -class TestCreateDBEngine(RequiresDBTestCase): - """Tests for the create_db_engine function.""" - - dsn = parse_obj_as(PostgresDsn, "postgresql://postgres:password@localhost") - - def test_connect_sync(self) -> None: - """Check that we can create a synchronous engine.""" - # All default params - create_db_engine(self.dsn) - - # With schema - create_db_engine(self.dsn, schema_name="public") - - def test_connect_async(self) -> None: - """Check that we can create an asynchronous engine.""" - # All default params - create_db_engine(self.dsn, use_asyncio=True) - - # With schema - create_db_engine(self.dsn, schema_name="public", use_asyncio=True) - - class TestReadConfig(SSGTestCase): """Tests for the read_config_file function.""" diff --git a/tests/utils.py b/tests/utils.py index 018c2dfd..67a45ef7 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -2,14 +2,14 @@ import os from functools import lru_cache from pathlib import Path -from sqlalchemy import create_engine +import shutil from subprocess import run import testing.postgresql from typing import Any from unittest import TestCase, skipUnless from sqlsynthgen import settings - +from sqlsynthgen.utils import create_db_engine class SysExit(Exception): """To force the function to exit as sys.exit() would.""" @@ -28,35 +28,6 @@ def get_test_settings() -> settings.Settings: ) -def run_psql(dump_file: Path, dsn: str="host=localhost port=5432 user=postgres") -> None: - """Run psql and pass dump_file_name as the --file option.""" - - # If you need to update a .dump file, use - # PGPASSWORD=password pg_dump \ - # --host=localhost \ - # --port=5432 \ - # --dbname=src \ - # --username=postgres \ - # --no-password \ - # --clean \ - # --create \ - # --insert \ - # --if-exists > tests/examples/FILENAME.dump - - env = os.environ.copy() - env = {**env, "PGPASSWORD": "password"} - - # Clear and re-create the test database - completed_process = run( - ["psql", "-d", dsn, f"--file={dump_file}"], - capture_output=True, - env=env, - check=True, - ) - # psql doesn't always return != 0 if it fails - assert completed_process.stderr == b"", completed_process.stderr - - class SSGTestCase(TestCase): """Parent class for all TestCases in SqlSynthGen.""" @@ -84,19 +55,49 @@ def assertFailure(self, result: Any) -> None: # pylint: disable=invalid-name self.assertReturnCode(result, 1) -@skipUnless(os.environ.get("REQUIRES_DB") == "1", "Set 'REQUIRES_DB=1' to enable.") +@skipUnless(shutil.which("psql"), "need to find 'psql': install PostgreSQL to enable") class RequiresDBTestCase(SSGTestCase): - """A test case that only runs if REQUIRES_DB has been set to 
1.""" + """A test case that only runs if PostgreSQL is installed.""" + schema_name = None + use_asyncio = False + examples_dir = "tests/examples" + dump_file_path = None + def setUp(self) -> None: super().setUp() self.postgresql = testing.postgresql.Postgresql() - self.engine = create_engine( + self.engine = create_db_engine( self.postgresql.url(), + schema_name=self.schema_name, + use_asyncio=self.use_asyncio, ) + if self.dump_file_path is not None: + self.run_psql(Path(self.examples_dir) / Path(self.dump_file_path)) def tearDown(self) -> None: self.postgresql.stop() super().tearDown() - def run_psql(self, file_path: Path) -> None: - run_psql(file_path, self.postgresql.url()) + def run_psql(self, dump_file: Path) -> None: + """Run psql and pass dump_file_name as the --file option.""" + + # If you need to update a .dump file, use + # PGPASSWORD=password pg_dump \ + # --host=localhost \ + # --port=5432 \ + # --dbname=src \ + # --username=postgres \ + # --no-password \ + # --clean \ + # --create \ + # --insert \ + # --if-exists > tests/examples/FILENAME.dump + + # Clear and re-create the test database + completed_process = run( + ["psql", "-d", self.postgresql.url(), "-f", dump_file], + capture_output=True, + check=True, + ) + # psql doesn't always return != 0 if it fails + assert completed_process.stderr == b"", completed_process.stderr From 9c1a1aa3ba9812ca67b76c188a7ff4f85185fab7 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 7 Mar 2025 19:03:01 +0000 Subject: [PATCH 36/85] Interactive tables configuration with initial test --- sqlsynthgen/interactive.py | 261 +++++++++++++++++++++++++++++++++++++ sqlsynthgen/main.py | 27 ++++ tests/test_interactive.py | 47 +++++++ tests/test_make.py | 8 +- tests/utils.py | 13 +- 5 files changed, 349 insertions(+), 7 deletions(-) create mode 100644 sqlsynthgen/interactive.py create mode 100644 tests/test_interactive.py diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py new file mode 100644 index 00000000..f03ee9b5 --- /dev/null +++ b/sqlsynthgen/interactive.py @@ -0,0 +1,261 @@ +import cmd +from collections.abc import Mapping +from dataclasses import dataclass +from enum import Enum +import logging +from typing import Self + +from prettytable import PrettyTable +from sqlalchemy import MetaData, Table, Column, text + +from sqlsynthgen.utils import create_db_engine + +logger = logging.getLogger(__name__) + +class TableType(Enum): + NORMAL = "normal" + IGNORE = "ignore" + VOCABULARY = "vocabulary" + +@dataclass +class TableEntry: + name: str + old_type: TableType + new_type: TableType + @classmethod + def make(_cls, name: str, config: Mapping) -> Self: + tables = config.get("tables", {}) + table = tables.get(name, {}) + if table.get("ignore", False): + return TableEntry(name, TableType.IGNORE, TableType.IGNORE) + if table.get("vocabulary_table", False): + return TableEntry(name, TableType.VOCABULARY, TableType.VOCABULARY) + return TableEntry(name, TableType.NORMAL, TableType.NORMAL) + + +class AskSaveCmd(cmd.Cmd): + intro = "Do you want to save this configuration?" + prompt = "(yes/no/cancel) " + file = None + def __init__(self): + super().__init__() + self.result = "" + def do_yes(self, _arg): + self.result = "yes" + return True + def do_no(self, _arg): + self.result = "no" + return True + def do_cancel(self, _arg): + self.result = "cancel" + return True + + +class TableCmd(cmd.Cmd): + intro = "Interactive table configuration (ignore or vocabulary). Type ? 
for help.\n" + prompt = "(tableconf) " + file = None + ERROR_NO_MORE_TABLES = "Error: There are no more tables" + ERROR_ALREADY_AT_START = "Error: Already at the start" + + def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): + super().__init__() + self.table_entries: list[TableEntry] = [ + TableEntry.make(name, config) + for name in metadata.tables.keys() + ] + self.table_index = 0 + self.config = config + self.metadata = metadata + self.set_prompt() + self.engine = create_db_engine(src_dsn, schema_name=src_schema) + self.connection = self.engine.connect() + def __enter__(self): + return self + def __exit__(self, exc_type, exc_val, exc_tb): + self.connection.close() + self.engine.dispose() + + def set_prompt(self): + if self.table_index < len(self.table_entries): + entry = self.table_entries[self.table_index] + if entry.new_type == TableType.IGNORE: + self.prompt = "(table: {} (ignored)) ".format(entry.name) + elif entry.new_type == TableType.VOCABULARY: + self.prompt = "(table: {} (vocab)) ".format(entry.name) + else: + self.prompt = "(table: {}) ".format(entry.name) + else: + self.prompt = "(table)" + def set_type(self, t_type: TableType): + if self.table_index < len(self.table_entries): + entry = self.table_entries[self.table_index] + entry.new_type = t_type + def set_index(self, index) -> bool: + if 0 <= index and index < len(self.table_entries): + self.table_index = index + self.set_prompt() + return True + return False + def next_table(self, report="No more tables"): + if not self.set_index(self.table_index + 1): + self.print(report) + def table_name(self): + return self.table_entries[self.table_index].name + def table_metadata(self) -> Table: + return self.metadata.tables[self.table_name()] + def copy_entries(self) -> None: + tables = self.config.get("tables", {}) + for entry in self.table_entries: + if entry.old_type != entry.new_type: + table: dict = tables.get(entry.name, {}) + if entry.new_type == TableType.IGNORE: + table["ignore"] = True + table.pop("vocabulary_table", None) + elif entry.new_type == TableType.VOCABULARY: + table.pop("ignore", None) + table["vocabulary_table"] = True + else: + table.pop("ignore", None) + table.pop("vocabulary_table", None) + tables[entry.name] = table + self.config["tables"] = tables + + def print(self, text: str, *args, **kwargs): + print(text.format(*args, **kwargs)) + def print_table(self, headings: list[str], rows: list[list[str]]): + output = PrettyTable() + output.field_names = headings + for row in rows: + output.add_row(row) + print(output) + def print_results(self, result): + self.print_table( + list(result.keys()), + [list(row) for row in result.all()] + ) + def ask_save(self): + ask = AskSaveCmd() + ask.cmdloop() + return ask.result + + def do_quit(self, _arg): + "Check the updates, save them if desired and quit the configurer." 
+ count = 0 + for entry in self.table_entries: + if entry.old_type != entry.new_type: + count += 1 + self.print( + "Changing {0} from {1} to {2}", + entry.name, + entry.old_type.value, + entry.new_type.value, + ) + if count == 0: + self.print("There are no changes.") + return True + reply = self.ask_save() + if reply == "yes": + self.copy_entries() + return True + if reply == "no": + return True + return False + def do_next(self, _arg): + "Go to the next table" + self.next_table(self.ERROR_NO_MORE_TABLES) + def do_previous(self, _arg): + "Go to the previous table" + if not self.set_index(self.table_index - 1): + self.print(self.ERROR_ALREADY_AT_START) + def do_ignore(self, _arg): + "Set the current table as ignored, and go to the next table" + self.set_type(TableType.IGNORE) + self.print("Table {} set as ignored", self.table_name()) + self.next_table() + def do_vocabulary(self, _arg): + "Set the current table as a vocabulary table, and go to the next table" + self.set_type(TableType.VOCABULARY) + self.print("Table {} set to be a vocabulary table", self.table_name()) + self.next_table() + def do_reset(self, _arg): + "Set the current table as neither a vocabulary table nor ignored, and go to the next table" + self.set_type(TableType.NORMAL) + self.print("Table {} reset", self.table_name()) + self.next_table() + def do_columns(self, _arg): + "Report the column names" + self.columnize(self.table_metadata().columns.keys()) + def do_data(self, arg: str): + """ + Report some data. + 'data' = report a random ten lines, + 'data 20' = report a random 20 lines, + 'data 20 ColumnName' = report a random twenty entries from ColumnName, + 'data 20 ColumnName 30' = report a random twenty entries from ColumnName of length at least 30, + """ + args = arg.split() + column = None + number = None + arg_index = 0 + min_length = 0 + table_metadata = self.table_metadata() + if arg_index < len(args) and args[arg_index].isnumeric(): + number = int(args[arg_index]) + arg_index += 1 + if arg_index < len(args) and args[arg_index] in table_metadata.columns: + column = args[arg_index] + arg_index += 1 + if arg_index < len(args) and args[arg_index].isnumeric(): + min_length = int(args[arg_index]) + arg_index += 1 + if arg_index != len(args): + self.print( + """Did not understand these arguments +The format is 'data [entries] [column-name [minimum-length]]' where [] means optional text. +Type 'columns' to find out valid column names for this table. 
+Type 'help data' for examples.""" + ) + return + if column is None: + if number is None: + number = 10 + self.print_row_data(number) + else: + if number is None: + number = 48 + self.print_column_data(column, number, min_length) + + def print_column_data(self, column: str, count: int, min_length: int): + where = "" + if 0 < min_length: + where = "WHERE LENGTH({column}) > {len}".format( + column=column, + len=min_length, + ) + result = self.connection.execute( + text("SELECT {column} FROM {table} {where} ORDER BY RANDOM() LIMIT {count}".format( + table=self.table_name(), + column=column, + count=count, + where=where, + )) + ) + self.columnize([x[0] for x in result.all()]) + + def print_row_data(self, count: int): + result = self.connection.execute( + text("SELECT * FROM {table} ORDER BY RANDOM() LIMIT {count}".format( + table=self.table_name(), + count=count, + )) + ) + if result is None: + self.print("No rows in this table!") + return + self.print_results(result) + +def update_config_tables(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): + with TableCmd(src_dsn, src_schema, metadata, config) as tc: + tc.cmdloop() + return tc.config diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 93ba3b0b..5e3e2e5d 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -12,6 +12,7 @@ from typer import Argument, Option, Typer from sqlsynthgen.create import create_db_data, create_db_tables, create_db_vocab +from sqlsynthgen.interactive import update_config_tables from sqlsynthgen.make import ( make_src_stats, make_table_generators, @@ -327,6 +328,32 @@ def generate_config( logger.debug("%s created.", config_file) +@app.command() +def configure_tables( + config_file: Optional[str] = Option(CONFIG_FILENAME, help="Path to write the configuration file to"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), +): + """ + Interactively set tables to ignored or vocabulary. 
+ """ + logger.debug("Configuring tables in %s.", config_file) + settings = get_settings() + src_dsn: str = _require_src_db_dsn(settings) + config_file_path = Path(config_file) + config = {} + if config_file_path.exists(): + config = yaml.load(config_file_path.read_text(encoding="UTF-8"), Loader=yaml.SafeLoader) + # we don't pass config here so that no tables are ignored + metadata = load_metadata(orm_file) + config_updated = update_config_tables(src_dsn, settings.src_schema, metadata, config) + if config_updated is None: + logger.debug("Cancelled") + return + content = yaml.dump(config_updated) + config_file_path.write_text(content, encoding="utf-8") + logger.debug("Tables configured in %s.", config_file) + + @app.command() def validate_config( config_file: Path = Argument(help="The configuration file to validate"), diff --git a/tests/test_interactive.py b/tests/test_interactive.py new file mode 100644 index 00000000..5e947611 --- /dev/null +++ b/tests/test_interactive.py @@ -0,0 +1,47 @@ +"""Tests for the base module.""" +from sqlalchemy import MetaData +from sqlalchemy.orm import declarative_base + +from sqlsynthgen.interactive import TableCmd +from tests.utils import RequiresDBTestCase + + +class TestTableCmd(TableCmd): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.reset() + def reset(self): + self.messages = [] + self.headings = [] + self.rows = [] + def print(self, text: str, *args, **kwargs): + self.messages.append((text, args, kwargs)) + def print_table(self, headings: list[str], rows: list[list[str]]): + self.headings = headings + self.rows = rows + def ask_save(self): + return "yes" + + +class ConfigureTablesTests(RequiresDBTestCase): + """Testing configure-tables.""" + dump_file_path = "src.dump" + database_name = "src" + #schema_name = "public" + + def test_table_name_prompts(self) -> None: + """Test that the prompts follow the names of the tables.""" + metadata = MetaData() + metadata.reflect(self.engine) + config = {} + with TestTableCmd(self.dsn, self.schema_name, metadata, config) as tc: + table_names = list(metadata.tables.keys()) + for t in table_names: + self.assertIn(t, tc.prompt) + tc.do_next("") + self.assertListEqual(tc.messages, [(TableCmd.ERROR_NO_MORE_TABLES, (), {})]) + tc.reset() + for t in reversed(table_names): + self.assertIn(t, tc.prompt) + tc.do_previous("") + self.assertListEqual(tc.messages, [(TableCmd.ERROR_ALREADY_AT_START, (), {})]) diff --git a/tests/test_make.py b/tests/test_make.py index 41468103..ec2feab0 100644 --- a/tests/test_make.py +++ b/tests/test_make.py @@ -295,14 +295,14 @@ def check_make_stats_output(self, src_stats: dict) -> None: def test_make_stats_no_asyncio_schema(self) -> None: """Test that make_src_stats works when explicitly naming a schema.""" src_stats = asyncio.get_event_loop().run_until_complete( - make_src_stats(self.postgresql.url(), self.config, "public") + make_src_stats(self.dsn, self.config, "public") ) self.check_make_stats_output(src_stats) def test_make_stats_no_asyncio(self) -> None: """Test that make_src_stats works using the example configuration.""" src_stats = asyncio.get_event_loop().run_until_complete( - make_src_stats(self.postgresql.url(), self.config) + make_src_stats(self.dsn, self.config) ) self.check_make_stats_output(src_stats) @@ -312,7 +312,7 @@ def test_make_stats_asyncio(self) -> None: """ config_asyncio = {**self.config, "use-asyncio": True} src_stats = asyncio.get_event_loop().run_until_complete( - make_src_stats(self.postgresql.url(), config_asyncio) + 
make_src_stats(self.dsn, config_asyncio) ) self.check_make_stats_output(src_stats) @@ -343,7 +343,7 @@ def test_make_stats_empty_result(self, mock_logger: MagicMock) -> None: ] } src_stats = asyncio.get_event_loop().run_until_complete( - make_src_stats(self.postgresql.url(), config, "public") + make_src_stats(self.dsn, config, "public") ) self.assertEqual(src_stats[query_name1], []) self.assertEqual(src_stats[query_name2], []) diff --git a/tests/utils.py b/tests/utils.py index 67a45ef7..fc67b30f 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -62,22 +62,29 @@ class RequiresDBTestCase(SSGTestCase): use_asyncio = False examples_dir = "tests/examples" dump_file_path = None + database_name = None def setUp(self) -> None: super().setUp() self.postgresql = testing.postgresql.Postgresql() + if self.dump_file_path is not None: + self.run_psql(Path(self.examples_dir) / Path(self.dump_file_path)) self.engine = create_db_engine( - self.postgresql.url(), + self.dsn, schema_name=self.schema_name, use_asyncio=self.use_asyncio, ) - if self.dump_file_path is not None: - self.run_psql(Path(self.examples_dir) / Path(self.dump_file_path)) def tearDown(self) -> None: self.postgresql.stop() super().tearDown() + @property + def dsn(self): + if self.database_name: + return self.postgresql.url(database=self.database_name) + return self.postgresql.url() + def run_psql(self, dump_file: Path) -> None: """Run psql and pass dump_file_name as the --file option.""" From f929a1057c1f825b7639571045e8eacfe646a41a Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 11 Mar 2025 10:18:47 +0000 Subject: [PATCH 37/85] Remaining tests for configure-tables --- .gitignore | 2 + poetry.lock | 102 ++++++++++++++++++++++++++++++++++- pyproject.toml | 2 + sqlsynthgen/interactive.py | 13 ++++- tests/examples/src.dump | 6 +-- tests/test_interactive.py | 107 ++++++++++++++++++++++++++++++++++++- 6 files changed, 224 insertions(+), 8 deletions(-) diff --git a/.gitignore b/.gitignore index 2f04884e..ee517fa3 100644 --- a/.gitignore +++ b/.gitignore @@ -143,7 +143,9 @@ docs/temp/* # vim swap files *.swp +# tool outputs ssg.py orm.yaml src-stats.yaml config.yaml +*.yaml.gz diff --git a/poetry.lock b/poetry.lock index 334264a4..2ec43f02 100644 --- a/poetry.lock +++ b/poetry.lock @@ -21,6 +21,17 @@ files = [ {file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"}, ] +[[package]] +name = "asn1crypto" +version = "1.5.1" +description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" +optional = false +python-versions = "*" +files = [ + {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, + {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, +] + [[package]] name = "astroid" version = "3.3.6" @@ -1152,6 +1163,21 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] +[[package]] +name = "pg8000" +version = "1.31.2" +description = "PostgreSQL interface library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pg8000-1.31.2-py3-none-any.whl", hash = "sha256:436c771ede71af4d4c22ba867a30add0bc5c942d7ab27fadbb6934a487ecc8f6"}, + {file = "pg8000-1.31.2.tar.gz", hash = 
"sha256:1ea46cf09d8eca07fe7eaadefd7951e37bee7fabe675df164f1a572ffb300876"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.2" +scramp = ">=1.4.5" + [[package]] name = "platformdirs" version = "4.3.6" @@ -1200,6 +1226,23 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "prettytable" +version = "3.15.1" +description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" +optional = false +python-versions = ">=3.9" +files = [ + {file = "prettytable-3.15.1-py3-none-any.whl", hash = "sha256:1bb0da7437e904ec879d2998aded19abc722719aa3d384a7faa44dcbe4aeb2e9"}, + {file = "prettytable-3.15.1.tar.gz", hash = "sha256:f0edb38060cb9161b2417939bfd5cd9877da73388fb19d1e8bf7987e8558896e"}, +] + +[package.dependencies] +wcwidth = "*" + +[package.extras] +tests = ["pytest", "pytest-cov", "pytest-lazy-fixtures"] + [[package]] name = "psycopg2-binary" version = "2.9.10" @@ -1697,6 +1740,20 @@ sphinx = ["sphinx (>=4.0,<6.0)"] testing = ["coverage-conditional-plugin (>=0.5)", "coverage[toml] (>=6.0)", "pytest (>=6.0)", "pytest-cov (>=3.0)", "pytest-mock (>=3.7)", "pytest-randomly (>=3.0)", "pytest-sugar (>=0.9.5)"] toml = ["tomli (>=2.0,<3.0)"] +[[package]] +name = "scramp" +version = "1.4.5" +description = "An implementation of the SCRAM protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "scramp-1.4.5-py3-none-any.whl", hash = "sha256:50e37c464fc67f37994e35bee4151e3d8f9320e9c204fca83a5d313c121bbbe7"}, + {file = "scramp-1.4.5.tar.gz", hash = "sha256:be3fbe774ca577a7a658117dca014e5d254d158cecae3dd60332dfe33ce6d78e"}, +] + +[package.dependencies] +asn1crypto = ">=1.5.1" + [[package]] name = "six" version = "1.17.0" @@ -2038,6 +2095,38 @@ test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3 timezone = ["python-dateutil"] url = ["furl (>=0.4.1)"] +[[package]] +name = "testing-common-database" +version = "2.0.3" +description = "utilities for testing.* packages" +optional = false +python-versions = "*" +files = [ + {file = "testing.common.database-2.0.3-py2.py3-none-any.whl", hash = "sha256:e3ed492bf480a87f271f74c53b262caf5d85c8bc09989a8f534fa2283ec52492"}, + {file = "testing.common.database-2.0.3.tar.gz", hash = "sha256:965d80b2985315325dc358c3061b174a712f4d4d5bf6a80b58b11f9a1dd86d73"}, +] + +[package.extras] +testing = ["nose"] + +[[package]] +name = "testing-postgresql" +version = "1.3.0" +description = "automatically setups a postgresql instance in a temporary directory, and destroys it after testing" +optional = false +python-versions = "*" +files = [ + {file = "testing.postgresql-1.3.0-py2.py3-none-any.whl", hash = "sha256:1b41daeb98dfc8cd4a584bb91e8f5f4ab182993870f95257afe5f1ba6151a598"}, + {file = "testing.postgresql-1.3.0.tar.gz", hash = "sha256:8e1a69760369a7a8ffe63a66b6d95a5cd82db2fb976e4a8f85ffd24fbfc447d8"}, +] + +[package.dependencies] +pg8000 = ">=1.10" +"testing.common.database" = "*" + +[package.extras] +testing = ["SQLAlchemy", "nose", "psycopg2"] + [[package]] name = "tomli" version = "2.2.1" @@ -2228,6 +2317,17 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", 
"pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + [[package]] name = "zipp" version = "3.21.0" @@ -2253,4 +2353,4 @@ docs = ["sphinx-rtd-theme", "sphinxcontrib-napoleon"] [metadata] lock-version = "2.0" python-versions = "^3.9,<3.13" -content-hash = "2240d60cdeb415bf60f58ada560652991673f18581bbe16f085cb490ec26a711" +content-hash = "c87bf05633a11e794207a9c7b44893f331b3236ecc51b81c3a7aab97c82275db" diff --git a/pyproject.toml b/pyproject.toml index b56cc864..dccf8a08 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ greenlet = "^3.1.1" pymysql = "^1.1.0" pandas = "^2" parsy = "^2.1" +prettytable = "^3.15.1" [tool.poetry.group.dev.dependencies] isort = "^5.10.1" @@ -47,6 +48,7 @@ pygments = "^2.14.0" rstcheck-core = {extras = ["sphinx"], version = "^1.0.3"} json-schema-for-humans = "^0.44.5" pre-commit = "^3.3.3" +testing-postgresql = "^1.3.0" [tool.poetry.group.extras.dependencies] tqdm = "^4.65.0" diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index f03ee9b5..6c823db6 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -57,6 +57,7 @@ class TableCmd(cmd.Cmd): file = None ERROR_NO_MORE_TABLES = "Error: There are no more tables" ERROR_ALREADY_AT_START = "Error: Already at the start" + ERROR_NO_SUCH_TABLE = "Error: '{0}' is not the name of a table in this database" def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): super().__init__() @@ -162,7 +163,15 @@ def do_quit(self, _arg): return True return False def do_next(self, _arg): - "Go to the next table" + "'next' = go to the next table, 'next tablename' = go to table 'tablename'" + if _arg: + # Find the index of the table called _arg, if any + index = next((i for i,entry in enumerate(self.table_entries) if entry.name == _arg), None) + if index is None: + self.print(self.ERROR_NO_SUCH_TABLE, _arg) + return + self.set_index(index) + return self.next_table(self.ERROR_NO_MORE_TABLES) def do_previous(self, _arg): "Go to the previous table" @@ -229,7 +238,7 @@ def do_data(self, arg: str): def print_column_data(self, column: str, count: int, min_length: int): where = "" if 0 < min_length: - where = "WHERE LENGTH({column}) > {len}".format( + where = "WHERE LENGTH({column}) >= {len}".format( column=column, len=min_length, ) diff --git a/tests/examples/src.dump b/tests/examples/src.dump index f03c3443..1adc3fb6 100644 --- a/tests/examples/src.dump +++ b/tests/examples/src.dump @@ -281,9 +281,9 @@ INSERT INTO public.person VALUES (7, 'Randy Random', false, '2023-03-01 00:00:00 INSERT INTO public.person VALUES (8, 'Randy Random', false, '2023-03-01 00:00:00+00'); INSERT INTO public.person VALUES (9, 'Randy Random', false, '2023-03-01 00:00:00+00'); INSERT INTO public.person VALUES (10, 'Randy Random', true, '2023-03-01 00:00:00+00'); -INSERT INTO public.person VALUES (11, 'Randy Random', false, '2023-03-01 00:00:00+00'); -INSERT INTO public.person VALUES (12, 'Randy Random', false, '2023-03-01 00:00:00+00'); -INSERT INTO public.person VALUES (13, 'Randy Random', true, '2023-03-01 00:00:00+00'); +INSERT INTO 
public.person VALUES (11, 'Testfried Testermann', false, '2023-03-01 00:00:00+00'); +INSERT INTO public.person VALUES (12, 'Veronica Fyre', false, '2023-03-01 00:00:00+00'); +INSERT INTO public.person VALUES (13, 'Miranda Rando-Generata', true, '2023-03-01 00:00:00+00'); INSERT INTO public.person VALUES (14, 'Randy Random', false, '2023-03-01 00:00:00+00'); INSERT INTO public.person VALUES (15, 'Randy Random', true, '2023-03-01 00:00:00+00'); INSERT INTO public.person VALUES (16, 'Randy Random', true, '2023-03-01 00:00:00+00'); diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 5e947611..370a2675 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -1,5 +1,5 @@ """Tests for the base module.""" -from sqlalchemy import MetaData +from sqlalchemy import MetaData, select from sqlalchemy.orm import declarative_base from sqlsynthgen.interactive import TableCmd @@ -14,11 +14,14 @@ def reset(self): self.messages = [] self.headings = [] self.rows = [] + self.column_items = [] def print(self, text: str, *args, **kwargs): self.messages.append((text, args, kwargs)) def print_table(self, headings: list[str], rows: list[list[str]]): self.headings = headings self.rows = rows + def columnize(self, items): + self.column_items.append(items) def ask_save(self): return "yes" @@ -27,7 +30,7 @@ class ConfigureTablesTests(RequiresDBTestCase): """Testing configure-tables.""" dump_file_path = "src.dump" database_name = "src" - #schema_name = "public" + schema_name = "public" def test_table_name_prompts(self) -> None: """Test that the prompts follow the names of the tables.""" @@ -45,3 +48,103 @@ def test_table_name_prompts(self) -> None: self.assertIn(t, tc.prompt) tc.do_previous("") self.assertListEqual(tc.messages, [(TableCmd.ERROR_ALREADY_AT_START, (), {})]) + tc.reset() + bad_table_name = "notarealtable" + tc.do_next(bad_table_name) + self.assertListEqual(tc.messages, [(TableCmd.ERROR_NO_SUCH_TABLE, (bad_table_name,), {})]) + tc.reset() + good_table_name = table_names[2] + tc.do_next(good_table_name) + self.assertListEqual(tc.messages, []) + self.assertIn(good_table_name, tc.prompt) + + def test_column_display(self) -> None: + """Test that we can see the names of the columns.""" + metadata = MetaData() + metadata.reflect(self.engine) + config = {} + with TestTableCmd(self.dsn, self.schema_name, metadata, config) as tc: + tc.do_next("unique_constraint_test") + tc.do_columns("") + self.assertListEqual(tc.column_items, [["id", "a", "b", "c"]]) + + def test_configure_tables(self) -> None: + """Test that we can change columns to ignore, vocab or reset.""" + metadata = MetaData() + metadata.reflect(self.engine) + config = { + "tables": { + "unique_constraint_test": { + "vocabulary_table": True, + }, + "no_pk_test": { + "ignore": True, + }, + }, + } + with TestTableCmd(self.dsn, self.schema_name, metadata, config) as tc: + tc.do_next("unique_constraint_test") + tc.do_reset("") + tc.do_next("person") + tc.do_vocabulary("") + tc.do_next("mitigation_type") + tc.do_ignore("") + tc.do_quit("") + tables = tc.config["tables"] + self.assertFalse(tables["unique_constraint_test"].get("vocabulary_table", False)) + self.assertFalse(tables["unique_constraint_test"].get("ignore", False)) + self.assertFalse(tables["no_pk_test"].get("vocabulary_table", False)) + self.assertTrue(tables["no_pk_test"].get("ignore", False)) + self.assertTrue(tables["person"].get("vocabulary_table", False)) + self.assertFalse(tables["person"].get("ignore", False)) + 
self.assertFalse(tables["mitigation_type"].get("vocabulary_table", False)) + self.assertTrue(tables["mitigation_type"].get("ignore", False)) + + def test_print_data(self) -> None: + """Test that we can print random rows from the table and random data from columns.""" + metadata = MetaData() + metadata.reflect(self.engine) + person_table = metadata.tables["person"] + with self.engine.connect() as conn: + person_rows = conn.execute(select(person_table)).mappings().fetchall() + person_data = { + row["person_id"]: row + for row in person_rows + } + name_set = {row["name"] for row in person_rows} + person_headings = ["person_id", "name", "research_opt_out", "stored_from"] + config = {} + with TestTableCmd(self.dsn, self.schema_name, metadata, config) as tc: + tc.do_next("person") + tc.do_data("") + self.assertListEqual(tc.headings, person_headings) + self.assertEqual(len(tc.rows), 10) # default number of rows is 10 + for row in tc.rows: + expected = person_data[row[0]] + self.assertListEqual(row, [expected[h] for h in person_headings]) + tc.reset() + rows_to_get_count = 6 + tc.do_data(str(rows_to_get_count)) + self.assertListEqual(tc.headings, person_headings) + self.assertEqual(len(tc.rows), rows_to_get_count) + for row in tc.rows: + expected = person_data[row[0]] + self.assertListEqual(row, [expected[h] for h in person_headings]) + tc.reset() + to_get_count = 12 + tc.do_data(f"{to_get_count} name") + self.assertEqual(len(tc.column_items), 1) + self.assertEqual(len(tc.column_items[0]), to_get_count) + self.assertLessEqual(set(tc.column_items[0]), name_set) + tc.reset() + tc.do_data(f"{to_get_count} name 12") + self.assertEqual(len(tc.column_items), 1) + self.assertEqual(len(tc.column_items[0]), to_get_count) + tc.reset() + tc.do_data(f"{to_get_count} name 13") + self.assertEqual(len(tc.column_items), 1) + self.assertEqual(set(tc.column_items[0]), set(filter(lambda n: 13 <= len(n), name_set))) + tc.reset() + tc.do_data(f"{to_get_count} name 16") + self.assertEqual(len(tc.column_items), 1) + self.assertEqual(set(tc.column_items[0]), set(filter(lambda n: 16 <= len(n), name_set))) From 5e26bda6328cf52d7ea935e9227bbc283c3644a7 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 21 Mar 2025 14:55:13 +0000 Subject: [PATCH 38/85] list-tables command --- sqlsynthgen/main.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 5e3e2e5d..1de2fd12 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -1,5 +1,6 @@ """Entrypoint for the SQLSynthGen package.""" import asyncio +from enum import Enum import json import sys from importlib import metadata @@ -426,6 +427,37 @@ def remove_tables( logger.info("Would remove tables if called with --yes.") +class TableType(str, Enum): + all = "all" + vocab = "vocab" + generated = "generated" + + +@app.command() +def list_tables( + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), + config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), + tables: TableType = Option(TableType.generated, help="Which tables to list"), +) -> None: + """List the names of tables""" + config = read_config_file(config_file) if config_file is not None else {} + orm_metadata = load_metadata(orm_file, config) + all_table_names = set(orm_metadata.tables.keys()) + vocab_table_names = { + table_name + for (table_name, table_config) in config.get("tables", {}).items() + if get_flag(table_config, "vocabulary_table") + } + if tables == TableType.all: + names = 
all_table_names + elif tables == TableType.generated: + names = all_table_names - vocab_table_names + else: + names = vocab_table_names + for name in sorted(names): + print(name) + + @app.command() def version() -> None: """Display version information.""" From 52779cc68a1ec9043bc87adcf3d82b8439bdaa93 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 26 Mar 2025 18:41:20 +0000 Subject: [PATCH 39/85] configure-tables gains tab-completion for next and data commands --- sqlsynthgen/interactive.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 6c823db6..906d00c5 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -173,6 +173,12 @@ def do_next(self, _arg): self.set_index(index) return self.next_table(self.ERROR_NO_MORE_TABLES) + def complete_next(self, text, line, begidx, endidx): + return [ + entry.name + for entry in self.table_entries + if entry.name.startswith(text) + ] def do_previous(self, _arg): "Go to the previous table" if not self.set_index(self.table_index - 1): @@ -234,6 +240,15 @@ def do_data(self, arg: str): if number is None: number = 48 self.print_column_data(column, number, min_length) + def complete_data(self, text, line, begidx, endidx): + previous_parts = line[:begidx - 1].split() + if len(previous_parts) != 2: + return [] + table_metadata = self.table_metadata() + return [ + k for k in table_metadata.columns.keys() + if k.startswith(text) + ] def print_column_data(self, column: str, count: int, min_length: int): where = "" @@ -250,7 +265,7 @@ def print_column_data(self, column: str, count: int, min_length: int): where=where, )) ) - self.columnize([x[0] for x in result.all()]) + self.columnize([str(x[0]) for x in result.all()]) def print_row_data(self, count: int): result = self.connection.execute( From 1c154a3cbf60783aafee0cc94bf92275c3d9d1cd Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 27 Mar 2025 14:30:27 +0000 Subject: [PATCH 40/85] configure-tables list command --- sqlsynthgen/interactive.py | 13 +++++++++++ tests/test_interactive.py | 47 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 60 insertions(+) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 906d00c5..a3b55ee2 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -17,6 +17,12 @@ class TableType(Enum): IGNORE = "ignore" VOCABULARY = "vocabulary" +TYPE_LETTER = { + TableType.NORMAL: " ", + TableType.IGNORE: "I", + TableType.VOCABULARY: "V", +} + @dataclass class TableEntry: name: str @@ -162,6 +168,13 @@ def do_quit(self, _arg): if reply == "no": return True return False + def do_list(self, arg): + "list the tables with their types" + for entry in self.table_entries: + old = entry.old_type + new = entry.new_type + becomes = " " if old == new else "->" + TYPE_LETTER[new] + self.print("{0}{1} {2}", TYPE_LETTER[old], becomes, entry.name) def do_next(self, _arg): "'next' = go to the next table, 'next tablename' = go to table 'tablename'" if _arg: diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 370a2675..521a8ae5 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -148,3 +148,50 @@ def test_print_data(self) -> None: tc.do_data(f"{to_get_count} name 16") self.assertEqual(len(tc.column_items), 1) self.assertEqual(set(tc.column_items[0]), set(filter(lambda n: 16 <= len(n), name_set))) + + def test_list_tables(self): + """Test that we can list the tables""" + metadata = MetaData() + 
metadata.reflect(self.engine) + config = { + "tables": { + "unique_constraint_test": { + "vocabulary_table": True, + }, + "no_pk_test": { + "ignore": True, + }, + }, + } + with TestTableCmd(self.dsn, self.schema_name, metadata, config) as tc: + tc.do_next("unique_constraint_test") + tc.do_ignore("") + tc.do_next("person") + tc.do_vocabulary("") + tc.reset() + tc.do_list("") + person_listed = False + unique_constraint_test_listed = False + no_pk_test_listed = False + for (text, args, kwargs) in tc.messages: + if args[2] == "person": + self.assertFalse(person_listed) + person_listed = True + self.assertEqual(args[0], " ") + self.assertEqual(args[1], "->V") + elif args[2] == "unique_constraint_test": + self.assertFalse(unique_constraint_test_listed) + unique_constraint_test_listed = True + self.assertEqual(args[0], "V") + self.assertEqual(args[1], "->I") + elif args[2] == "no_pk_test": + self.assertFalse(no_pk_test_listed) + no_pk_test_listed = True + self.assertEqual(args[0], "I") + self.assertEqual(args[1], " ") + else: + self.assertEqual(args[0], " ") + self.assertEqual(args[1], " ") + self.assertTrue(person_listed) + self.assertTrue(unique_constraint_test_listed) + self.assertTrue(no_pk_test_listed) From b5543b169b7700c70b8807425d9c6b5f0690fc67 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 3 Apr 2025 18:37:27 +0100 Subject: [PATCH 41/85] Initial generator interactivity --- sqlsynthgen/interactive.py | 426 +++++++++++++++++--- sqlsynthgen/json_schemas/config_schema.json | 4 + sqlsynthgen/main.py | 29 +- tests/test_interactive.py | 4 +- 4 files changed, 397 insertions(+), 66 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index a3b55ee2..2f6b9020 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -1,12 +1,12 @@ +from abc import ABC, abstractmethod import cmd from collections.abc import Mapping from dataclasses import dataclass from enum import Enum import logging -from typing import Self from prettytable import PrettyTable -from sqlalchemy import MetaData, Table, Column, text +from sqlalchemy import MetaData, Table, text from sqlsynthgen.utils import create_db_engine @@ -16,27 +16,25 @@ class TableType(Enum): NORMAL = "normal" IGNORE = "ignore" VOCABULARY = "vocabulary" + PRIVATE = "private" TYPE_LETTER = { TableType.NORMAL: " ", TableType.IGNORE: "I", TableType.VOCABULARY: "V", + TableType.PRIVATE: "P", +} + +TYPE_PROMPT = { + TableType.NORMAL: "(table: {}) ", + TableType.IGNORE: "(table: {} (ignore)) ", + TableType.VOCABULARY: "(table: {} (vocab)) ", + TableType.PRIVATE: "(table: {} (private)) ", } @dataclass class TableEntry: name: str - old_type: TableType - new_type: TableType - @classmethod - def make(_cls, name: str, config: Mapping) -> Self: - tables = config.get("tables", {}) - table = tables.get(name, {}) - if table.get("ignore", False): - return TableEntry(name, TableType.IGNORE, TableType.IGNORE) - if table.get("vocabulary_table", False): - return TableEntry(name, TableType.VOCABULARY, TableType.VOCABULARY) - return TableEntry(name, TableType.NORMAL, TableType.NORMAL) class AskSaveCmd(cmd.Cmd): @@ -57,24 +55,25 @@ def do_cancel(self, _arg): return True -class TableCmd(cmd.Cmd): - intro = "Interactive table configuration (ignore or vocabulary). Type ? 
for help.\n" - prompt = "(tableconf) " - file = None +class DbCmd(ABC, cmd.Cmd): ERROR_NO_MORE_TABLES = "Error: There are no more tables" ERROR_ALREADY_AT_START = "Error: Already at the start" ERROR_NO_SUCH_TABLE = "Error: '{0}' is not the name of a table in this database" + @abstractmethod + def make_table_entry(self, name: str) -> TableEntry: + ... + def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): super().__init__() - self.table_entries: list[TableEntry] = [ - TableEntry.make(name, config) - for name in metadata.tables.keys() - ] - self.table_index = 0 self.config = config self.metadata = metadata - self.set_prompt() + self.table_entries: list[TableEntry] = [] + for name in metadata.tables.keys(): + entry = self.make_table_entry(name) + if entry is not None: + self.table_entries.append(entry) + self.table_index = 0 self.engine = create_db_engine(src_dsn, schema_name=src_schema) self.connection = self.engine.connect() def __enter__(self): @@ -83,34 +82,77 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.connection.close() self.engine.dispose() - def set_prompt(self): - if self.table_index < len(self.table_entries): - entry = self.table_entries[self.table_index] - if entry.new_type == TableType.IGNORE: - self.prompt = "(table: {} (ignored)) ".format(entry.name) - elif entry.new_type == TableType.VOCABULARY: - self.prompt = "(table: {} (vocab)) ".format(entry.name) - else: - self.prompt = "(table: {}) ".format(entry.name) - else: - self.prompt = "(table)" - def set_type(self, t_type: TableType): - if self.table_index < len(self.table_entries): - entry = self.table_entries[self.table_index] - entry.new_type = t_type - def set_index(self, index) -> bool: + def print(self, text: str, *args, **kwargs): + print(text.format(*args, **kwargs)) + def print_table(self, headings: list[str], rows: list[list[str]]): + output = PrettyTable() + output.field_names = headings + for row in rows: + output.add_row(row) + print(output) + def print_results(self, result): + self.print_table( + list(result.keys()), + [list(row) for row in result.all()] + ) + def ask_save(self): + ask = AskSaveCmd() + ask.cmdloop() + return ask.result + + def set_table_index(self, index) -> bool: if 0 <= index and index < len(self.table_entries): self.table_index = index self.set_prompt() return True return False def next_table(self, report="No more tables"): - if not self.set_index(self.table_index + 1): + if not self.set_table_index(self.table_index + 1): self.print(report) + return False + return True def table_name(self): return self.table_entries[self.table_index].name def table_metadata(self) -> Table: return self.metadata.tables[self.table_name()] + + +@dataclass +class TableCmdTableEntry(TableEntry): + old_type: TableType + new_type: TableType + +class TableCmd(DbCmd): + intro = "Interactive table configuration (ignore, vocabulary or private). Type ? 
for help.\n" + prompt = "(tableconf) " + file = None + + def make_table_entry(self, name: str) -> TableEntry: + tables = self.config.get("tables", {}) + table = tables.get(name, {}) + if table.get("ignore", False): + return TableCmdTableEntry(name, TableType.IGNORE, TableType.IGNORE) + if table.get("vocabulary_table", False): + return TableCmdTableEntry(name, TableType.VOCABULARY, TableType.VOCABULARY) + if table.get("primary_private", False): + return TableCmdTableEntry(name, TableType.PRIVATE, TableType.PRIVATE) + return TableCmdTableEntry(name, TableType.NORMAL, TableType.NORMAL) + + def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): + super().__init__(src_dsn, src_schema, metadata, config) + self.config = config + self.set_prompt() + + def set_prompt(self): + if self.table_index < len(self.table_entries): + entry = self.table_entries[self.table_index] + self.prompt = TYPE_PROMPT[entry.new_type].format(entry.name) + else: + self.prompt = "(table) " + def set_type(self, t_type: TableType): + if self.table_index < len(self.table_entries): + entry = self.table_entries[self.table_index] + entry.new_type = t_type def copy_entries(self) -> None: tables = self.config.get("tables", {}) for entry in self.table_entries: @@ -119,33 +161,22 @@ def copy_entries(self) -> None: if entry.new_type == TableType.IGNORE: table["ignore"] = True table.pop("vocabulary_table", None) + table.pop("primary_private", None) elif entry.new_type == TableType.VOCABULARY: table.pop("ignore", None) table["vocabulary_table"] = True + table.pop("primary_private", None) + elif entry.new_type == TableType.PRIVATE: + table.pop("ignore", None) + table.pop("vocabulary_table", None) + table["primary_private"] = True else: table.pop("ignore", None) table.pop("vocabulary_table", None) + table.pop("primary_private", None) tables[entry.name] = table self.config["tables"] = tables - def print(self, text: str, *args, **kwargs): - print(text.format(*args, **kwargs)) - def print_table(self, headings: list[str], rows: list[list[str]]): - output = PrettyTable() - output.field_names = headings - for row in rows: - output.add_row(row) - print(output) - def print_results(self, result): - self.print_table( - list(result.keys()), - [list(row) for row in result.all()] - ) - def ask_save(self): - ask = AskSaveCmd() - ask.cmdloop() - return ask.result - def do_quit(self, _arg): "Check the updates, save them if desired and quit the configurer." 
count = 0 @@ -183,7 +214,7 @@ def do_next(self, _arg): if index is None: self.print(self.ERROR_NO_SUCH_TABLE, _arg) return - self.set_index(index) + self.set_table_index(index) return self.next_table(self.ERROR_NO_MORE_TABLES) def complete_next(self, text, line, begidx, endidx): @@ -194,7 +225,7 @@ def complete_next(self, text, line, begidx, endidx): ] def do_previous(self, _arg): "Go to the previous table" - if not self.set_index(self.table_index - 1): + if not self.set_table_index(self.table_index - 1): self.print(self.ERROR_ALREADY_AT_START) def do_ignore(self, _arg): "Set the current table as ignored, and go to the next table" @@ -206,8 +237,13 @@ def do_vocabulary(self, _arg): self.set_type(TableType.VOCABULARY) self.print("Table {} set to be a vocabulary table", self.table_name()) self.next_table() - def do_reset(self, _arg): - "Set the current table as neither a vocabulary table nor ignored, and go to the next table" + def do_private(self, _arg): + "Set the current table as a primary private table (such as the table of patients)" + self.set_type(TableType.PRIVATE) + self.print("Table {} set to be a primary private table", self.table_name()) + self.next_table() + def do_normal(self, _arg): + "Set the current table as neither a vocabulary table nor ignored nor primary private, and go to the next table" self.set_type(TableType.NORMAL) self.print("Table {} reset", self.table_name()) self.next_table() @@ -296,3 +332,269 @@ def update_config_tables(src_dsn: str, src_schema: str, metadata: MetaData, conf with TableCmd(src_dsn, src_schema, metadata, config) as tc: tc.cmdloop() return tc.config + + +class Generator(ABC): + @abstractmethod + def name(self) -> str: + """Get the name of this generator.""" + @abstractmethod + def kws(self) -> list[str]: + """Get a list of names of kwargs that this generator wants.""" + #... + + +@dataclass +class GeneratorInfo: + column: str + is_primary_key: bool + old_name: str | None + new_name: str | None + +@dataclass +class GeneratorCmdTableEntry(TableEntry): + generators: list[GeneratorInfo] + +class GeneratorCmd(DbCmd): + intro = "Interactive generator configuration. Type ? 
for help.\n" + prompt = "(generatorconf) " + file = None + + def make_table_entry(self, name: str) -> TableEntry: + tables = self.config.get("tables", {}) + table = tables.get(name, {}) + metadata_table = self.metadata.tables[name] + columns = set(metadata_table.columns.keys()) + generator_infos: list[GeneratorInfo] = [] + multiple_columns_assigned: dict[str, list[str]] = {} + for rg in table.get("row_generators", []): + gen_name = rg.get("name", None) + if gen_name: + ca = rg.get("columns_assigned", []) + single_ca = None + if isinstance(ca, str): + if ca in columns: + columns.remove(ca) + single_ca = ca + else: + self.print( + "table '{0}' has '{1}' assigned to column '{2}' which is not in this table", + name, gen_name, ca, + ) + else: + columns.difference_update(ca) + if len(ca) == 1: + single_ca = ca[0] + if single_ca is not None: + generator_infos.append(GeneratorInfo( + column=single_ca, + is_primary_key=metadata_table.columns[single_ca].primary_key, + old_name=gen_name, + new_name=gen_name, + )) + else: + multiple_columns_assigned[gen_name] = ca + for col in columns: + generator_infos.append(GeneratorInfo( + column=col, + is_primary_key=metadata_table.columns[col].primary_key, + old_name=None, + new_name=None, + )) + if multiple_columns_assigned: + self.print( + "The following multi-column generators for table {0} are defined in the configuration file and cannot be configured with this command", + name, + ) + for (gen_name, cols) in multiple_columns_assigned.items(): + self.print(" {0}: {1}", gen_name, cols) + if len(generator_infos) == 0: + return None + return GeneratorCmdTableEntry( + name=name, + generators=generator_infos + ) + + def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): + super().__init__(src_dsn, src_schema, metadata, config) + self.generator_index = 0 + self.set_prompt() + + def set_table_index(self, index): + ret = super().set_table_index(index) + if ret: + self.generator_index = 0 + return ret + + def previous_table(self): + ret = self.set_table_index(self.table_index - 1) + if ret: + table = self.get_table() + if table is None: + self.print("Internal error!
table {0} does not have any generators!", self.table_index) + return False + self.generator_index = len(table.generators) - 1 + return ret + + def get_table(self) -> GeneratorCmdTableEntry | None: + if self.table_index < len(self.table_entries): + return self.table_entries[self.table_index] + return None + + def get_table_and_generator(self) -> tuple[str | None, GeneratorInfo | None]: + if self.table_index < len(self.table_entries): + entry = self.table_entries[self.table_index] + if self.generator_index < len(entry.generators): + return (entry.name, entry.generators[self.generator_index]) + return (entry.name, None) + return (None, None) + + def get_column_name(self) -> str | None: + (_, generator_info) = self.get_table_and_generator() + return generator_info.column if generator_info else None + + def set_prompt(self): + (table_name, gen_info) = self.get_table_and_generator() + if table_name is None: + self.prompt = "(generators) " + return + if gen_info is None: + self.prompt = "({table}) ".format(table=table_name) + return + if gen_info.is_primary_key: + column = f"{gen_info.column}[pk]" + else: + column = gen_info.column + if gen_info.new_name: + self.prompt = "({table}.{column} ({generator})) ".format( + table=table_name, + column=column, + generator=gen_info.new_name, + ) + else: + self.prompt = "({table}.{column}) ".format( + table=table_name, + column=column, + ) + + def set_generator(self, generator: str): + if self.table_index < len(self.table_entries): + entry = self.table_entries[self.table_index] + if self.generator_index < len(entry.generators): + entry.generators[self.generator_index].new_name = generator + + def copy_entries(self) -> None: + tables = self.config.get("tables", {}) + for entry in self.table_entries: + # We probably need to reconstruct row_generators. Hmmm. + # We will need to keep row_generators intact, not break them apart like now + for generator in entry.generators: + pass + self.config["tables"] = tables + + def do_quit(self, _arg): + "Check the updates, save them if desired and quit the configurer."
+ count = 0 + for entry in self.table_entries: + header_shown = False + for gen in entry.generators: + if gen.old_name != gen.new_name: + if not header_shown: + header_shown = True + self.print("Table {0}:", entry.name) + count += 1 + self.print( + "...changing {0} from {1} to {2}", + gen.column, + gen.old_name, + gen.new_name, + ) + if count == 0: + self.print("There are no changes.") + return True + reply = self.ask_save() + if reply == "yes": + self.copy_entries() + return True + if reply == "no": + return True + return False + + def do_tables(self, arg): + "list the tables" + for entry in self.table_entries: + gen_count = len(entry.generators) + how_many = "one generator" if gen_count == 1 else f"{gen_count} generators" + self.print("{0} ({1})", entry.name, how_many) + + def do_list(self, arg): + "list the generators in the current table" + if len(self.table_entries) <= self.table_index: + self.print("Error: no table {0}", self.table_index) + return + for gen in self.table_entries[self.table_index].generators: + old = "" if gen.old_name is None else gen.old_name + if gen.old_name == gen.new_name: + becomes = "" + if old == "": + old = "(not set)" + elif gen.new_name is None: + becomes = "(delete)" + else: + becomes = f"->{gen.new_name}" + primary = "[primary-key]" if gen.is_primary_key else "" + self.print("{0}{1}{2} {3}", old, becomes, primary, gen.column) + + def do_columns(self, _arg): + "Report the column names" + self.columnize(self.table_metadata().columns.keys()) + + def do_next(self, _arg): + "Go to the next generator" + table = self.get_table() + if table is None: + self.print("No more tables") + return + next_gi = self.generator_index + 1 + if next_gi == len(table.generators): + self.next_table() + return + self.generator_index = next_gi + self.set_prompt() + + def do_previous(self, _arg): + "Go to the previous generator" + if self.generator_index == 0: + self.previous_table() + else: + self.generator_index -= 1 + self.set_prompt() + + def do_data(self, arg: str): + """ Report some random data from the source versus the old and new generators.
""" + args = arg.split() + source = self.get_column_data(20) + self.print_table(["Source data"], [[s] for s in source]) + + def get_column_data(self, count: int, min_length: int = 0): + column = self.get_column_name() + where = "" + if 0 < min_length: + where = "WHERE LENGTH({column}) >= {len}".format( + column=column, + len=min_length, + ) + result = self.connection.execute( + text("SELECT {column} FROM {table} {where} ORDER BY RANDOM() LIMIT {count}".format( + table=self.table_name(), + column=column, + count=count, + where=where, + )) + ) + return [str(x[0]) for x in result.all()] + + +def update_config_generators(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): + with GeneratorCmd(src_dsn, src_schema, metadata, config) as gc: + gc.cmdloop() + return gc.config diff --git a/sqlsynthgen/json_schemas/config_schema.json b/sqlsynthgen/json_schemas/config_schema.json index 40070061..77db08c2 100644 --- a/sqlsynthgen/json_schemas/config_schema.json +++ b/sqlsynthgen/json_schemas/config_schema.json @@ -153,6 +153,10 @@ "description": "Whether to export the table data.", "type": "boolean" }, + "primary_private": { + "description": "Whether the table is a Primary Private table (perhaps a table of patients).", + "type": "boolean" + }, "num_rows_per_pass": { "description": "The number of rows to generate per pass.", "type": "integer" diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 1de2fd12..8650ff04 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -13,7 +13,7 @@ from typer import Argument, Option, Typer from sqlsynthgen.create import create_db_data, create_db_tables, create_db_vocab -from sqlsynthgen.interactive import update_config_tables +from sqlsynthgen.interactive import update_config_tables, update_config_generators from sqlsynthgen.make import ( make_src_stats, make_table_generators, @@ -335,7 +335,7 @@ def configure_tables( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), ): """ - Interactively set tables to ignored or vocabulary. + Interactively set tables to ignored, vocabulary or primary private. """ logger.debug("Configuring tables in %s.", config_file) settings = get_settings() @@ -355,6 +355,31 @@ def configure_tables( logger.debug("Tables configured in %s.", config_file) +@app.command() +def configure_generators( + config_file: Optional[str] = Option(CONFIG_FILENAME, help="Path of the configuration file to alter"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), +): + """ + Interactively set generators for column data. 
+ """ + logger.debug("Configuring generators in %s.", config_file) + settings = get_settings() + src_dsn: str = _require_src_db_dsn(settings) + config_file_path = Path(config_file) + config = {} + if config_file_path.exists(): + config = yaml.load(config_file_path.read_text(encoding="UTF-8"), Loader=yaml.SafeLoader) + metadata = load_metadata(orm_file, config) + config_updated = update_config_generators(src_dsn, settings.src_schema, metadata, config) + if config_updated is None: + logger.debug("Cancelled") + return + content = yaml.dump(config_updated) + config_file_path.write_text(content, encoding="utf-8") + logger.debug("Generators configured in %s.", config_file) + + @app.command() def validate_config( config_file: Path = Argument(help="The configuration file to validate"), diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 521a8ae5..883e6528 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -69,7 +69,7 @@ def test_column_display(self) -> None: self.assertListEqual(tc.column_items, [["id", "a", "b", "c"]]) def test_configure_tables(self) -> None: - """Test that we can change columns to ignore, vocab or reset.""" + """Test that we can change columns to ignore, vocab or normal.""" metadata = MetaData() metadata.reflect(self.engine) config = { @@ -84,7 +84,7 @@ def test_configure_tables(self) -> None: } with TestTableCmd(self.dsn, self.schema_name, metadata, config) as tc: tc.do_next("unique_constraint_test") - tc.do_reset("") + tc.do_normal("") tc.do_next("person") tc.do_vocabulary("") tc.do_next("mitigation_type") From 0276ef353bd47688d0b5988e1a3d24865b906946 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 4 Apr 2025 20:00:18 +0100 Subject: [PATCH 42/85] new generators implementation! --- sqlsynthgen/base.py | 9 + sqlsynthgen/generators.py | 408 +++++++++++++++++++++++++++++++++++++ sqlsynthgen/interactive.py | 24 ++- 3 files changed, 438 insertions(+), 3 deletions(-) create mode 100644 sqlsynthgen/generators.py diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index b59b734c..eed59019 100644 --- a/sqlsynthgen/base.py +++ b/sqlsynthgen/base.py @@ -2,6 +2,8 @@ from abc import ABC, abstractmethod from collections.abc import Callable from dataclasses import dataclass +import functools +import math import numpy import os from pathlib import Path @@ -20,6 +22,7 @@ table_row_count, ) +@functools.cache def zipf_weights(size): total = sum(map(lambda n: 1/n, range(1, size + 1))) return [ @@ -29,12 +32,18 @@ def zipf_weights(size): class DistributionGenerator: + root3 = math.sqrt(3) def __init__(self): self.rng = numpy.random.default_rng() def uniform(self, low: float, high: float) -> float: return self.rng.uniform(low=low, high=high) + def uniform_ms(self, mean, sd) -> float: + m = float(mean) + h = self.root3 * float(sd) + return self.rng.uniform(low=m - h, high=m + h) + def normal(self, mean: float, sd: float) -> float: return self.rng.normal(loc=mean, scale=sd) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py new file mode 100644 index 00000000..1912c6fc --- /dev/null +++ b/sqlsynthgen/generators.py @@ -0,0 +1,408 @@ +""" +Generator factories for making generators for single columns. 
+""" + +from abc import ABC, abstractmethod +import decimal +import math +import mimesis +import mimesis.locales +from sqlalchemy import Column, Connection, text +from sqlalchemy.types import Integer, Numeric, String + +from sqlsynthgen.base import DistributionGenerator + +dist_gen = DistributionGenerator() +generic = mimesis.Generic(locale=mimesis.locales.Locale.EN_GB) + +class Generator(ABC): + """ + Random data generator. + + A generator is specific to a particular column in a particular table in + a particluar database. + + A generator knows how to fetch its summary data from the database, how to calculate + its fit (if apropriate) and which function actually does the generation. + + It also knows these summary statistics for the column it was instantiated on, + and therefore knows how to generate fake data for that column. + """ + @abstractmethod + def function_name(self) -> str: + """ The name of the generator function to put into ssg.py. """ + + @abstractmethod + def nominal_kwargs(self) -> dict[str, str]: + """ + The kwargs the generator wants to be called with. + The values will tend to be references to something in the src-stats.yaml + file (for example 'SRC_STATS["auto_patient"]["age_mean"]') + """ + + def select_aggregate_clauses(self) -> dict[str, str]: + """ + SQL clauses to add to a SELECT ... FROM {table} query. + + Will add to SRC_STATS{"auto__{table}") + For example {"count": "COUNT(*)", "avg_thiscolumn": "AVG(thiscolumn)"} + will make the clause become: + SELECT COUNT(*) AS count, AVG(thiscolumn) AS avg_thiscolumn FROM thistable + """ + return {} + + def custom_queries(self) -> dict[str, str]: + """ + SQL queries to add to SRC_STATS. + + Should be used for queries that do not follow the SELECT ... FROM table format, + because these should use select_aggregate_clauses. + """ + return {} + + @abstractmethod + def actual_kwargs(self) -> dict[str, any]: + """ + The kwargs (summary statistics) this generator is instantiated with. + """ + + @abstractmethod + def generate_data(self, count) -> list[any]: + """ + Generate 'count' random data points for this column. + """ + + def fit(self) -> float | None: + """ + Return a value representing how well the distribution fits the real source data. + + 0.0 means "perfectly". + None means undefined. + """ + return None + + +class GeneratorFactory(ABC): + """ + A factory for making generators appropriate for a database column. + """ + @abstractmethod + def get_generators(self, column: Column, connection) -> list[Generator]: + """ + Returns all the generators that might be appropriate for this column. + """ + + +class MultiGeneratorFactory(GeneratorFactory): + """ A composite factory. """ + def __init__(self, factories: list[GeneratorFactory]): + super().__init__() + self.factories = factories + + def get_generators(self, column, connection) -> list[Generator]: + return [ + generator + for factory in self.factories + for generator in factory.get_generators(column, connection) + ] + +class MimesisGenerator(Generator): + def __init__(self, function_name: str): + """ Function name is relative to 'generic', for example 'person.name' """ + super().__init__() + f = generic + for part in function_name.split("."): + if not hasattr(f, part): + raise Exception(f"Mimesis does not have a function {function_name}: {part} not found") + f = getattr(f, part) + if not callable(f): + raise Exception(f"Mimesis object {function_name} is not a callable, so cannot be used as a generator") + self.name = "generic." 
+ function_name + self.generator_function = f + def function_name(self): + return self.name + def nominal_kwargs(self): + return {} + def actual_kwargs(self): + return {} + def generate_data(self, count): + return [ + self.generator_function() + for _ in range(count) + ] + +class MimesisStringGeneratorFactory(GeneratorFactory): + """ + All Mimesis generators that return strings. + """ + def get_generators(self, column, connection): + if not isinstance(column.type.as_generic(), String): + return [] + return list(map(MimesisGenerator, [ + "address.calling_code", + "address.city", + "address.continent", + "address.country", + "address.country_code", + "address.postal_code", + "address.province", + "address.street_number", + "address.street_name", + "address.street_suffix", + "person.blood_type", + "person.email", + "person.first_name", + "person.last_name", + "person.full_name", + "person.gender", + "person.language", + "person.nationality", + "person.occupation", + "person.password", + "person.title", + "person.university", + "person.username", + "person.worldview", + "text.answer", + "text.color", + "text.level", + "text.quote", + "text.sentence", + "text.text", + "text.word", + ])) + +class MimesisFloatGeneratorFactory(GeneratorFactory): + """ + All Mimesis generators that return floating point numbers. + """ + def get_generators(self, column, connection): + if not isinstance(column.type.as_generic(), Numeric): + return [] + return list(map(MimesisGenerator, [ + "person.height", + ])) + +class MimesisIntegerGeneratorFactory(GeneratorFactory): + """ + All Mimesis generators that return integers. + """ + def get_generators(self, column, connection): + ct = column.type.as_generic() + if not isinstance(ct, Numeric) and not isinstance(ct, Integer): + return [] + return list(map(MimesisGenerator, [ + "person.weight", + "person.age", + ])) + + +def fit_from_buckets(xs: list[float], ys: list[float]): + sum_diff_squared = sum(map(lambda t, a: (t - a)*(t - a), xs, ys)) + return math.sqrt(sum_diff_squared / len(ys)) + + +class ContinuousDistributionGenerator(Generator): + def __init__(self, table_name: str, column_name: str, mean: float, stddev: float, buckets: list[float]): + super().__init__() + self.table_name = table_name + self.column_name = column_name + self.mean = mean + self.stddev = stddev + self._fit = fit_from_buckets(self.expected_buckets, buckets) + def nominal_kwargs(self): + return { + "mean": f'SRC_STATS["auto__{self.table_name}"]["mean__{self.column_name}"]', + "sd": f'SRC_STATS["auto__{self.table_name}"]["stddev__{self.column_name}"]', + } + def actual_kwargs(self): + return { + "mean": self.mean, + "sd": self.stddev, + } + def select_aggregate_clauses(self): + clauses = super().select_aggregate_clauses() + return { + **clauses, + f"mean__{self.column_name}": f"AVG({self.column_name})", + f"stddev__{self.column_name}": f"STDDEV({self.column_name})", + } + def fit(self): + return self._fit + + +class GaussianGenerator(ContinuousDistributionGenerator): + expected_buckets = [0.0227, 0.0441, 0.0918, 0.1499, 0.1915, 0.1915, 0.1499, 0.0918, 0.0441, 0.0227] + def function_name(self): + return "dist_gen.normal" + def generate_data(self, count): + return [ + dist_gen.normal(self.mean, self.stddev) + for _ in range(count) + ] + + +class UniformGenerator(ContinuousDistributionGenerator): + expected_buckets = [0, 0.06698, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.06698, 0] + def function_name(self): + return "dist_gen.uniform_ms" + def generate_data(self, count): + return [ + 
dist_gen.uniform_ms(self.mean, self.stddev) + for _ in range(count) + ] + + +class ContinuousDistributionGeneratorFactory(GeneratorFactory): + """ + All generators that want an average and standard deviation. + """ + def get_generators(self, column, connection: Connection): + ct = column.type.as_generic() + if not isinstance(ct, Numeric) and not isinstance(ct, Integer): + return [] + column_name = column.name + table_name = column.table.name + result = connection.execute( + text("SELECT AVG({column}) AS mean, STDDEV({column}) AS stddev, COUNT({column}) AS count FROM {table}".format( + table=table_name, + column=column_name, + )) + ).first() + if result is None: + return [] + raw_buckets = connection.execute(text( + "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( + column=column.name, table=column.table.name, x=result.mean - 2 * result.stddev, w = result.stddev / 2 + ) + )) + buckets = [0] * 10 + for rb in raw_buckets: + if rb.b is not None: + bucket = min(9, max(0, int(rb.b) + 1)) + buckets[bucket] += rb.f / result.count + return [ + GaussianGenerator(table_name, column_name, result.mean, result.stddev, buckets), + UniformGenerator(table_name, column_name, result.mean, result.stddev, buckets), + ] + + +def zipf_distribution(total, bins): + basic_dist = list(map(lambda n: 1/n, range(1, bins + 1))) + bd_remaining = sum(basic_dist) + for b in basic_dist: + # yield b/bd_remaining of the `total` remaining + if bd_remaining == 0: + yield 0 + else: + x = math.floor(0.5 + total * b / bd_remaining) + bd_remaining -= x * bd_remaining / total + total -= x + yield x + + +class ChoiceGenerator(Generator): + def __init__(self, table_name, column_name, values, counts): + super().__init__() + self.table_name = table_name + self.column_name = column_name + self.values = values + estimated_counts = self.get_estimated_counts(counts) + self._fit = fit_from_buckets(counts, estimated_counts) + def nominal_kwargs(self): + return { + "a": f'SRC_STATS["auto__{self.table_name}__{self.column_name}"]["value"]', + } + def actual_kwargs(self): + return { + "a": self.values, + } + def custom_queries(self): + qs = super().custom_queries() + t = self.table_name + c = self.column_name + return { + **qs, + f"auto__{t}__{c}": f"SELECT {c} AS value FROM {t} GROUP BY value ORDER BY COUNT({c}) DESC", + } + def fit(self): + return self._fit + +class ZipfChoiceGenerator(ChoiceGenerator): + def get_estimated_counts(self, counts): + return list(zipf_distribution(sum(counts), len(counts))) + def function_name(self): + return "dist_gen.zipf_choice" + def generate_data(self, count): + return [ + dist_gen.zipf_choice(self.values, len(self.values)) + for _ in range(count) + ] + + +def uniform_distribution(total, bins): + p = total // bins + n = total % bins + for i in range(0, n): + yield p + 1 + for i in range(n, bins): + yield p + + +class UniformChoiceGenerator(ChoiceGenerator): + def get_estimated_counts(self, counts): + return list(uniform_distribution(sum(counts), len(counts))) + def function_name(self): + return "dist_gen.choice" + def generate_data(self, count): + return [ + dist_gen.choice(self.values) + for _ in range(count) + ] + + +class ChoiceGeneratorFactory(GeneratorFactory): + """ + All generators that want an average and standard deviation. 
+ """ + def get_generators(self, column, connection: Connection): + ct = column.type.as_generic() + if not isinstance(ct, Numeric) and not isinstance(ct, Integer): + return [] + column_name = column.name + table_name = column.table.name + results = connection.execute( + text("SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( + table=table_name, + column=column_name, + )) + ) + if results is None: + return [] + values = [] # The values found + counts = [] # The number or each value + total = 0 # total number of non-NULL results + for result in results: + c = result.f + if c != 0: + total += c + counts.append(c) + v = result.v + if type(v) is decimal.Decimal: + v = float(v) + values.append(v) + if not counts or 500 < len(counts): + return [] + return [ + ZipfChoiceGenerator(table_name, column_name, values, counts), + UniformChoiceGenerator(table_name, column_name, values, counts), + ] + + +everything_factory = MultiGeneratorFactory([ + MimesisStringGeneratorFactory(), + MimesisIntegerGeneratorFactory(), + MimesisFloatGeneratorFactory(), + ContinuousDistributionGeneratorFactory(), + ChoiceGeneratorFactory(), +]) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 2f6b9020..2528d171 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -6,8 +6,9 @@ import logging from prettytable import PrettyTable -from sqlalchemy import MetaData, Table, text +from sqlalchemy import Column, MetaData, Table, text +from sqlsynthgen.generators import everything_factory from sqlsynthgen.utils import create_db_engine logger = logging.getLogger(__name__) @@ -424,6 +425,7 @@ def set_table_index(self, index): ret = super().set_table_index(index) if ret: self.generator_index = 0 + self.set_prompt() return ret def previous_table(self): @@ -453,13 +455,20 @@ def get_column_name(self) -> str | None: (_, generator_info) = self.get_table_and_generator() return generator_info.column if generator_info else None + def column_metadata(self) -> Column | None: + table = self.table_metadata() + column_name = self.get_column_name() + if table is None or column_name is None: + return None + return table.columns[column_name] + def set_prompt(self): (table_name, gen_info) = self.get_table_and_generator() if table_name is None: self.prompt = "(generators) " return if gen_info is None: - self.prompt = "({table}) ".format(table_name) + self.prompt = "({table}) ".format(table=table_name) return if gen_info.is_primary_key: column = f"{gen_info.column}[pk]" @@ -542,7 +551,7 @@ def do_list(self, arg): becomes = "(delete)" else: becomes = f"->{gen.new_name}" - primary = "[primary-key]" if gen.is_primary else "" + primary = "[primary-key]" if gen.is_primary_key else "" self.print("{0}{1}{2} {3}", old, becomes, primary, gen.column) def do_columns(self, _arg): @@ -593,6 +602,15 @@ def get_column_data(self, count: int, min_length: int = 0): ) return [str(x[0]) for x in result.all()] + def do_propose(self, arg): + column = self.column_metadata() + if column is None: + self.print("Error: No such column") + return + gens = everything_factory.get_generators(column, self.connection) + for gen in gens: + self.print("{0}: (fit: {1}) {2}...", gen.function_name(), gen.fit(), gen.generate_data(5)) + def update_config_generators(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): with GeneratorCmd(src_dsn, src_schema, metadata, config) as gc: From 54e061efffa8ea74805821246154160612bd1aee Mon Sep 17 00:00:00 2001 From: Tim Band Date: Mon, 7 Apr 2025 
18:16:29 +0100 Subject: [PATCH 43/85] configure-generators gains "compare" command Proposals are sorted. Text generators gain a fit (based on length) --- sqlsynthgen/generators.py | 135 ++++++++++++++++++++++++++++--------- sqlsynthgen/interactive.py | 80 +++++++++++++++++----- tests/test_interactive.py | 3 + 3 files changed, 170 insertions(+), 48 deletions(-) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 1912c6fc..3bd54719 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -9,6 +9,7 @@ import mimesis.locales from sqlalchemy import Column, Connection, text from sqlalchemy.types import Integer, Numeric, String +from typing import Callable from sqlsynthgen.base import DistributionGenerator @@ -93,6 +94,59 @@ def get_generators(self, column: Column, connection) -> list[Generator]: """ +class Buckets: + """ + Finds the real distribution of continuous data so that we can measure + the fit of generators against it. + """ + def __init__(self, connection: Connection, table_name: str, column_name: str, mean:float, stddev: float, count: int): + raw_buckets = connection.execute(text( + "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( + column=column_name, table=table_name, x=mean - 2 * stddev, w = stddev / 2 + ) + )) + self.buckets = [0] * 10 + for rb in raw_buckets: + if rb.b is not None: + bucket = min(9, max(0, int(rb.b) + 1)) + self.buckets[bucket] += rb.f / count + self.mean = mean + self.stddev = stddev + + @classmethod + def make_buckets(_cls, connection: Connection, table_name: str, column_name: str): + """ + Construct a Buckets object. + """ + result = connection.execute( + text("SELECT AVG({column}) AS mean, STDDEV({column}) AS stddev, COUNT({column}) AS count FROM {table}".format( + table=table_name, + column=column_name, + )) + ).first() + if result is None: + return None + return Buckets(connection, table_name, column_name, result.mean, result.stddev, result.count) + + def fit_from_counts(self, bucket_counts: list[float]) -> float: + """ + Figure out the fit from bucket counts from the generator distribution. + """ + return fit_from_buckets(self.buckets, bucket_counts) + + def fit_from_values(self, values: list[float]) -> float: + """ + Figure out the fit from samples from the generator distribution. + """ + buckets = [0] * 10 + x=self.mean - 2 * self.stddev + w = self.stddev / 2 + for v in values: + b = min(9, max(0, int((v - x)/w))) + buckets[b] += 1 + return self.fit_from_counts(buckets) + + class MultiGeneratorFactory(GeneratorFactory): """ A composite factory. """ def __init__(self, factories: list[GeneratorFactory]): @@ -106,9 +160,23 @@ def get_generators(self, column, connection) -> list[Generator]: for generator in factory.get_generators(column, connection) ] + class MimesisGenerator(Generator): - def __init__(self, function_name: str): - """ Function name is relative to 'generic', for example 'person.name' """ + def __init__( + self, + function_name: str, + value_fn: Callable[[any], float] | None=None, + buckets: Buckets | None=None, + ): + """ + Generator from Mimesis. + + :param: function_name is relative to 'generic', for example 'person.name'. + :param: value_fn Function to convert generator output to floats, if needed. The values + thus produced are compared against the buckets to estimate the fit. + :param: buckets The distribution of string lengths in the real data. If this is None + then the fit method will return None. 
+ """ super().__init__() f = generic for part in function_name.split("."): @@ -119,6 +187,16 @@ def __init__(self, function_name: str): raise Exception(f"Mimesis object {function_name} is not a callable, so cannot be used as a generator") self.name = "generic." + function_name self.generator_function = f + if buckets is None: + self._fit = None + return + samples = self.generate_data(400) + if value_fn: + samples = [ + value_fn(s) + for s in samples + ] + self._fit = buckets.fit_from_values(samples) def function_name(self): return self.name def nominal_kwargs(self): @@ -130,6 +208,9 @@ def generate_data(self, count): self.generator_function() for _ in range(count) ] + def fit(self): + return self._fit + class MimesisStringGeneratorFactory(GeneratorFactory): """ @@ -138,7 +219,12 @@ class MimesisStringGeneratorFactory(GeneratorFactory): def get_generators(self, column, connection): if not isinstance(column.type.as_generic(), String): return [] - return list(map(MimesisGenerator, [ + buckets = Buckets.make_buckets( + connection, + column.table.name, + f"LENGTH({column.name})", + ) + return list(map(lambda gen: MimesisGenerator(gen, len, buckets), [ "address.calling_code", "address.city", "address.continent", @@ -199,26 +285,26 @@ def get_generators(self, column, connection): def fit_from_buckets(xs: list[float], ys: list[float]): sum_diff_squared = sum(map(lambda t, a: (t - a)*(t - a), xs, ys)) - return math.sqrt(sum_diff_squared / len(ys)) + return sum_diff_squared / len(ys) class ContinuousDistributionGenerator(Generator): - def __init__(self, table_name: str, column_name: str, mean: float, stddev: float, buckets: list[float]): + def __init__(self, table_name: str, column_name: str, buckets: Buckets): super().__init__() self.table_name = table_name self.column_name = column_name - self.mean = mean - self.stddev = stddev - self._fit = fit_from_buckets(self.expected_buckets, buckets) + self.buckets = buckets def nominal_kwargs(self): return { "mean": f'SRC_STATS["auto__{self.table_name}"]["mean__{self.column_name}"]', "sd": f'SRC_STATS["auto__{self.table_name}"]["stddev__{self.column_name}"]', } def actual_kwargs(self): + if self.buckets is None: + return {} return { - "mean": self.mean, - "sd": self.stddev, + "mean": self.buckets.mean, + "sd": self.buckets.stddev, } def select_aggregate_clauses(self): clauses = super().select_aggregate_clauses() @@ -228,7 +314,9 @@ def select_aggregate_clauses(self): f"stddev__{self.column_name}": f"STDDEV({self.column_name})", } def fit(self): - return self._fit + if self.buckets is None: + return None + return self.buckets.fit_from_counts(self.expected_buckets) class GaussianGenerator(ContinuousDistributionGenerator): @@ -257,33 +345,16 @@ class ContinuousDistributionGeneratorFactory(GeneratorFactory): """ All generators that want an average and standard deviation. 
""" - def get_generators(self, column, connection: Connection): + def get_generators(self, column: Column, connection: Connection): ct = column.type.as_generic() if not isinstance(ct, Numeric) and not isinstance(ct, Integer): return [] column_name = column.name table_name = column.table.name - result = connection.execute( - text("SELECT AVG({column}) AS mean, STDDEV({column}) AS stddev, COUNT({column}) AS count FROM {table}".format( - table=table_name, - column=column_name, - )) - ).first() - if result is None: - return [] - raw_buckets = connection.execute(text( - "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( - column=column.name, table=column.table.name, x=result.mean - 2 * result.stddev, w = result.stddev / 2 - ) - )) - buckets = [0] * 10 - for rb in raw_buckets: - if rb.b is not None: - bucket = min(9, max(0, int(rb.b) + 1)) - buckets[bucket] += rb.f / result.count + buckets = Buckets.make_buckets(connection, table_name, column_name) return [ - GaussianGenerator(table_name, column_name, result.mean, result.stddev, buckets), - UniformGenerator(table_name, column_name, result.mean, result.stddev, buckets), + GaussianGenerator(table_name, column_name, buckets), + UniformGenerator(table_name, column_name, buckets), ] diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 2528d171..296fa739 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -91,6 +91,11 @@ def print_table(self, headings: list[str], rows: list[list[str]]): for row in rows: output.add_row(row) print(output) + def print_table_by_columns(self, columns: dict[str, list[str]]): + output = PrettyTable() + for field_name, data in columns.items(): + output.add_column(field_name, data) + print(output) def print_results(self, result): self.print_table( list(result.keys()), @@ -378,8 +383,8 @@ def make_table_entry(self, name: str) -> TableEntry: columns.remove(ca) single_ca = ca else: - self.print( - "table '{0}' has '{1}' assigned to column '{2}' which is not in this table", + logger.warning( + "table '%s' has '%s' assigned to column '%s' which is not in this table", name, gen_name, ca, ) else: @@ -419,6 +424,7 @@ def make_table_entry(self, name: str) -> TableEntry: def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): super().__init__(src_dsn, src_schema, metadata, config) self.generator_index = 0 + self.generators_valid_indices = None self.set_prompt() def set_table_index(self, index): @@ -578,13 +584,43 @@ def do_previous(self, _arg): self.generator_index -= 1 self.set_prompt() - def do_data(self, arg: str): - """ Report some random data from the source versus the old and new generators. """ - args = arg.split() - source = self.get_column_data(20) - self.print_table(["Source data"], [[s] for s in source]) + def get_generator_proposals(self) -> list[Generator]: + if (self.table_index, self.generator_index) != self.generators_valid_indices: + self.generators = None + if self.generators is None: + column = self.column_metadata() + if column is None: + logger.error("No such column") + return [] + gens = everything_factory.get_generators(column, self.connection) + gens.sort(key=lambda g: g.fit()) + self.generators = gens + return self.generators + + def do_compare(self, arg: str): + """ + Compare the real data with some generators. - def get_column_data(self, count: int, min_length: int = 0): + 'compare': just look at some source data from this column. 
+ 'compare 5 6 10': compare a sample of the source data with a sample + from generators 5, 6 and 10. You can find out which numbers + correspond to which generators using the 'propose' command. + """ + args = arg.split() + limit = 20 + comparison = { + "source": self.get_column_data(limit, to_str=str), + } + gens = self.get_generator_proposals() + for argument in args: + if argument.isnumeric(): + n = int(argument) + if 0 < n and n <= len(gens): + gen = gens[n - 1] + comparison[gen.function_name()] = gen.generate_data(limit) + self.print_table_by_columns(comparison) + + def get_column_data(self, count: int, to_str=repr, min_length: int = 0): column = self.get_column_name() where = "" if 0 < min_length: @@ -600,16 +636,28 @@ def get_column_data(self, count: int, min_length: int = 0): where=where, )) ) - return [str(x[0]) for x in result.all()] + return [to_str(x[0]) for x in result.all()] def do_propose(self, arg): - column = self.column_metadata() - if column is None: - self.print("Error: No such column") - return - gens = everything_factory.get_generators(column, self.connection) - for gen in gens: - self.print("{0}: (fit: {1}) {2}...", gen.function_name(), gen.fit(), gen.generate_data(5)) + """ + Display a list of possible generators for this column. + + They will be listed in order of fit, the most likely matches first. + The results can be compared (against a sample of the real data in + the column and against each other) with the 'compare' command. + """ + limit = 5 + gens = self.get_generator_proposals() + sample = self.get_column_data(limit) + self.print("Sample of actual source data: {0}...", ",".join(sample)) + for index, gen in enumerate(gens): + self.print( + "{index}. {name}: (fit: {fit:.0f}) {sample} ...", + index = index + 1, + name=gen.function_name(), + fit=gen.fit(), + sample=", ".join(map(repr, gen.generate_data(limit))) + ) def update_config_generators(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 883e6528..b31af81b 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -15,11 +15,14 @@ def reset(self): self.headings = [] self.rows = [] self.column_items = [] + self.columns: dict[str, list[str]] = {} def print(self, text: str, *args, **kwargs): self.messages.append((text, args, kwargs)) def print_table(self, headings: list[str], rows: list[list[str]]): self.headings = headings self.rows = rows + def print_table_by_columns(self, columns: dict[str, list[str]]): + self.columns = columns def columnize(self, items): self.column_items.append(items) def ask_save(self): From cebe37fa6bf482a2b83615e769b1a64b5920bc06 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 8 Apr 2025 12:42:18 +0100 Subject: [PATCH 44/85] Fresh connection for each query --- sqlsynthgen/generators.py | 168 +++++++++++++++++++++---------------- sqlsynthgen/interactive.py | 111 +++++++++++++----------- 2 files changed, 159 insertions(+), 120 deletions(-) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 3bd54719..2e4cc9c9 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -7,7 +7,7 @@ import math import mimesis import mimesis.locales -from sqlalchemy import Column, Connection, text +from sqlalchemy import Column, Engine, text from sqlalchemy.types import Integer, Numeric, String from typing import Callable @@ -38,17 +38,23 @@ def nominal_kwargs(self) -> dict[str, str]: """ The kwargs the generator wants to be called with. 
The values will tend to be references to something in the src-stats.yaml - file (for example 'SRC_STATS["auto_patient"]["age_mean"]') + file. + For example {"avg_age": 'SRC_STATS["auto__patient"]["age_mean"]'} will + provide the value stored in src-stats.yaml as + SRC_STATS["auto__patient"]["age_mean"] as the "avg_age" argument + to the generator function. """ def select_aggregate_clauses(self) -> dict[str, str]: """ SQL clauses to add to a SELECT ... FROM {table} query. - Will add to SRC_STATS{"auto__{table}") + Will add to SRC_STATS["auto__{table}"] For example {"count": "COUNT(*)", "avg_thiscolumn": "AVG(thiscolumn)"} will make the clause become: - SELECT COUNT(*) AS count, AVG(thiscolumn) AS avg_thiscolumn FROM thistable + "SELECT COUNT(*) AS count, AVG(thiscolumn) AS avg_thiscolumn FROM thistable" + and this will populate SRC_STATS["auto__thistable"]["count"] and + SRC_STATS["auto__thistable"]["avg_thiscolumn"] in the src-stats.yaml file. """ return {} @@ -58,6 +64,10 @@ def custom_queries(self) -> dict[str, str]: Should be used for queries that do not follow the SELECT ... FROM table format, because these should use select_aggregate_clauses. + + For example {"myquery", "SELECT one, too AS two FROM mytable WHERE too > 1"} + will populate SRC_STATS["myquery"]["one"] and SRC_STATS["myquery"]["two"] + in the src-stats.yaml file. """ return {} @@ -73,14 +83,14 @@ def generate_data(self, count) -> list[any]: Generate 'count' random data points for this column. """ - def fit(self) -> float | None: + def fit(self, default=None) -> float | None: """ Return a value representing how well the distribution fits the real source data. 0.0 means "perfectly". - None means undefined. + Returns default if no fitness has been defined. """ - return None + return default class GeneratorFactory(ABC): @@ -88,7 +98,7 @@ class GeneratorFactory(ABC): A factory for making generators appropriate for a database column. """ @abstractmethod - def get_generators(self, column: Column, connection) -> list[Generator]: + def get_generators(self, column: Column, engine: Engine) -> list[Generator]: """ Returns all the generators that might be appropriate for this column. """ @@ -99,34 +109,36 @@ class Buckets: Finds the real distribution of continuous data so that we can measure the fit of generators against it. 
""" - def __init__(self, connection: Connection, table_name: str, column_name: str, mean:float, stddev: float, count: int): - raw_buckets = connection.execute(text( - "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( - column=column_name, table=table_name, x=mean - 2 * stddev, w = stddev / 2 - ) - )) - self.buckets = [0] * 10 - for rb in raw_buckets: - if rb.b is not None: - bucket = min(9, max(0, int(rb.b) + 1)) - self.buckets[bucket] += rb.f / count - self.mean = mean - self.stddev = stddev + def __init__(self, engine: Engine, table_name: str, column_name: str, mean:float, stddev: float, count: int): + with engine.connect() as connection: + raw_buckets = connection.execute(text( + "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( + column=column_name, table=table_name, x=mean - 2 * stddev, w = stddev / 2 + ) + )) + self.buckets = [0] * 10 + for rb in raw_buckets: + if rb.b is not None: + bucket = min(9, max(0, int(rb.b) + 1)) + self.buckets[bucket] += rb.f / count + self.mean = mean + self.stddev = stddev @classmethod - def make_buckets(_cls, connection: Connection, table_name: str, column_name: str): + def make_buckets(_cls, engine: Engine, table_name: str, column_name: str): """ Construct a Buckets object. """ - result = connection.execute( - text("SELECT AVG({column}) AS mean, STDDEV({column}) AS stddev, COUNT({column}) AS count FROM {table}".format( - table=table_name, - column=column_name, - )) - ).first() - if result is None: - return None - return Buckets(connection, table_name, column_name, result.mean, result.stddev, result.count) + with engine.connect() as connection: + result = connection.execute( + text("SELECT AVG({column}) AS mean, STDDEV({column}) AS stddev, COUNT({column}) AS count FROM {table}".format( + table=table_name, + column=column_name, + )) + ).first() + if result is None: + return None + return Buckets(engine, table_name, column_name, result.mean, result.stddev, result.count) def fit_from_counts(self, bucket_counts: list[float]) -> float: """ @@ -153,11 +165,11 @@ def __init__(self, factories: list[GeneratorFactory]): super().__init__() self.factories = factories - def get_generators(self, column, connection) -> list[Generator]: + def get_generators(self, column: Column, engine: Engine) -> list[Generator]: return [ generator for factory in self.factories - for generator in factory.get_generators(column, connection) + for generator in factory.get_generators(column, engine) ] @@ -208,23 +220,32 @@ def generate_data(self, count): self.generator_function() for _ in range(count) ] - def fit(self): - return self._fit + def fit(self, default=None): + return default if self._fit is None else self._fit class MimesisStringGeneratorFactory(GeneratorFactory): """ All Mimesis generators that return strings. """ - def get_generators(self, column, connection): + def get_generators(self, column: Column, engine: Engine): if not isinstance(column.type.as_generic(), String): return [] - buckets = Buckets.make_buckets( - connection, - column.table.name, - f"LENGTH({column.name})", - ) - return list(map(lambda gen: MimesisGenerator(gen, len, buckets), [ + try: + with engine.connect() as connection: + buckets = Buckets.make_buckets( + connection, + column.table.name, + f"LENGTH({column.name})", + ) + fitness_fn = len + except Exception: + # Some column types that appear to be strings (such as enums) + # cannot have their lengths measured. 
In this case we cannot + # detect fitness using lengths. + buckets = None + fitness_fn = None + return list(map(lambda gen: MimesisGenerator(gen, fitness_fn, buckets), [ "address.calling_code", "address.city", "address.continent", @@ -262,7 +283,7 @@ class MimesisFloatGeneratorFactory(GeneratorFactory): """ All Mimesis generators that return floating point numbers. """ - def get_generators(self, column, connection): + def get_generators(self, column: Column, _engine: Engine): if not isinstance(column.type.as_generic(), Numeric): return [] return list(map(MimesisGenerator, [ @@ -273,7 +294,7 @@ class MimesisIntegerGeneratorFactory(GeneratorFactory): """ All Mimesis generators that return integers. """ - def get_generators(self, column, connection): + def get_generators(self, column: Column, _engine: Engine): ct = column.type.as_generic() if not isinstance(ct, Numeric) and not isinstance(ct, Integer): return [] @@ -313,9 +334,9 @@ def select_aggregate_clauses(self): f"mean__{self.column_name}": f"AVG({self.column_name})", f"stddev__{self.column_name}": f"STDDEV({self.column_name})", } - def fit(self): + def fit(self, default=None): if self.buckets is None: - return None + return default return self.buckets.fit_from_counts(self.expected_buckets) @@ -325,7 +346,7 @@ def function_name(self): return "dist_gen.normal" def generate_data(self, count): return [ - dist_gen.normal(self.mean, self.stddev) + dist_gen.normal(self.buckets.mean, self.buckets.stddev) for _ in range(count) ] @@ -336,7 +357,7 @@ def function_name(self): return "dist_gen.uniform_ms" def generate_data(self, count): return [ - dist_gen.uniform_ms(self.mean, self.stddev) + dist_gen.uniform_ms(self.buckets.mean, self.buckets.stddev) for _ in range(count) ] @@ -345,13 +366,13 @@ class ContinuousDistributionGeneratorFactory(GeneratorFactory): """ All generators that want an average and standard deviation. """ - def get_generators(self, column: Column, connection: Connection): + def get_generators(self, column: Column, engine: Engine): ct = column.type.as_generic() if not isinstance(ct, Numeric) and not isinstance(ct, Integer): return [] column_name = column.name table_name = column.table.name - buckets = Buckets.make_buckets(connection, table_name, column_name) + buckets = Buckets.make_buckets(engine, table_name, column_name) return [ GaussianGenerator(table_name, column_name, buckets), UniformGenerator(table_name, column_name, buckets), @@ -396,8 +417,8 @@ def custom_queries(self): **qs, f"auto__{t}__{c}": f"SELECT {c} AS value FROM {t} GROUP BY value ORDER BY COUNT({c}) DESC", } - def fit(self): - return self._fit + def fit(self, default=None): + return default if self._fit is None else self._fit class ZipfChoiceGenerator(ChoiceGenerator): def get_estimated_counts(self, counts): @@ -436,32 +457,33 @@ class ChoiceGeneratorFactory(GeneratorFactory): """ All generators that want an average and standard deviation. 
""" - def get_generators(self, column, connection: Connection): + def get_generators(self, column, engine: Engine): ct = column.type.as_generic() if not isinstance(ct, Numeric) and not isinstance(ct, Integer): return [] column_name = column.name table_name = column.table.name - results = connection.execute( - text("SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( - table=table_name, - column=column_name, - )) - ) - if results is None: - return [] - values = [] # The values found - counts = [] # The number or each value - total = 0 # total number of non-NULL results - for result in results: - c = result.f - if c != 0: - total += c - counts.append(c) - v = result.v - if type(v) is decimal.Decimal: - v = float(v) - values.append(v) + with engine.connect() as connection: + results = connection.execute( + text("SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( + table=table_name, + column=column_name, + )) + ) + if results is None: + return [] + values = [] # The values found + counts = [] # The number or each value + total = 0 # total number of non-NULL results + for result in results: + c = result.f + if c != 0: + total += c + counts.append(c) + v = result.v + if type(v) is decimal.Decimal: + v = float(v) + values.append(v) if not counts or 500 < len(counts): return [] return [ diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 296fa739..f0b89ff8 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -8,7 +8,7 @@ from prettytable import PrettyTable from sqlalchemy import Column, MetaData, Table, text -from sqlsynthgen.generators import everything_factory +from sqlsynthgen.generators import everything_factory, Generator from sqlsynthgen.utils import create_db_engine logger = logging.getLogger(__name__) @@ -76,11 +76,9 @@ def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Ma self.table_entries.append(entry) self.table_index = 0 self.engine = create_db_engine(src_dsn, schema_name=src_schema) - self.connection = self.engine.connect() def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): - self.connection.close() self.engine.dispose() def print(self, text: str, *args, **kwargs): @@ -312,27 +310,29 @@ def print_column_data(self, column: str, count: int, min_length: int): column=column, len=min_length, ) - result = self.connection.execute( - text("SELECT {column} FROM {table} {where} ORDER BY RANDOM() LIMIT {count}".format( - table=self.table_name(), - column=column, - count=count, - where=where, - )) - ) - self.columnize([str(x[0]) for x in result.all()]) + with self.engine.connect() as connection: + result = connection.execute( + text("SELECT {column} FROM {table} {where} ORDER BY RANDOM() LIMIT {count}".format( + table=self.table_name(), + column=column, + count=count, + where=where, + )) + ) + self.columnize([str(x[0]) for x in result.all()]) def print_row_data(self, count: int): - result = self.connection.execute( - text("SELECT * FROM {table} ORDER BY RANDOM() LIMIT {count}".format( - table=self.table_name(), - count=count, - )) - ) - if result is None: - self.print("No rows in this table!") - return - self.print_results(result) + with self.engine.connect() as connection: + result = connection.execute( + text("SELECT * FROM {table} ORDER BY RANDOM() LIMIT {count}".format( + table=self.table_name(), + count=count, + )) + ) + if result is None: + self.print("No rows in this table!") + return + 
self.print_results(result) def update_config_tables(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): with TableCmd(src_dsn, src_schema, metadata, config) as tc: @@ -340,16 +340,6 @@ def update_config_tables(src_dsn: str, src_schema: str, metadata: MetaData, conf return tc.config -class Generator(ABC): - @abstractmethod - def name(self) -> str: - """Get the name of this generator.""" - @abstractmethod - def kws(self) -> list[str]: - """Get a list of names of kwargs that this generator wants.""" - #... - - @dataclass class GeneratorInfo: column: str @@ -592,8 +582,8 @@ def get_generator_proposals(self) -> list[Generator]: if column is None: logger.error("No such column") return [] - gens = everything_factory.get_generators(column, self.connection) - gens.sort(key=lambda g: g.fit()) + gens = everything_factory.get_generators(column, self.engine) + gens.sort(key=lambda g: g.fit(9999)) self.generators = gens return self.generators @@ -611,13 +601,39 @@ def do_compare(self, arg: str): comparison = { "source": self.get_column_data(limit, to_str=str), } - gens = self.get_generator_proposals() + gens: list[Generator] = self.get_generator_proposals() + table_name = self.table_name() for argument in args: if argument.isnumeric(): n = int(argument) if 0 < n and n <= len(gens): gen = gens[n - 1] - comparison[gen.function_name()] = gen.generate_data(limit) + comparison[f"{n}. {gen.function_name()}"] = gen.generate_data(limit) + sacs = gen.select_aggregate_clauses() + cqs = gen.custom_queries() + if not sacs and cqs: + self.print( + "{0}. {1} requires no data from the source database.", + n, + gen.function_name(), + ) + else: + self.print( + "{0}. {1} requires the following data from the source database:", + n, + gen.function_name(), + ) + kwa = gen.actual_kwargs() + kwn = gen.nominal_kwargs() + if sacs: + clauses = [ + f"{q} AS {n}" + for n, q in sacs + ] + select_q = f"SELECT {', '.join(clauses)} FROM {table_name}" + "..." + if cqs: + "..." self.print_table_by_columns(comparison) def get_column_data(self, count: int, to_str=repr, min_length: int = 0): @@ -628,15 +644,16 @@ def get_column_data(self, count: int, to_str=repr, min_length: int = 0): column=column, len=min_length, ) - result = self.connection.execute( - text("SELECT {column} FROM {table} {where} ORDER BY RANDOM() LIMIT {count}".format( - table=self.table_name(), - column=column, - count=count, - where=where, - )) - ) - return [to_str(x[0]) for x in result.all()] + with self.engine.connect() as connection: + result = connection.execute( + text("SELECT {column} FROM {table} {where} ORDER BY RANDOM() LIMIT {count}".format( + table=self.table_name(), + column=column, + count=count, + where=where, + )) + ) + return [to_str(x[0]) for x in result.all()] def do_propose(self, arg): """ @@ -653,9 +670,9 @@ def do_propose(self, arg): for index, gen in enumerate(gens): self.print( "{index}. {name}: (fit: {fit:.0f}) {sample} ...", - index = index + 1, + index=index + 1, name=gen.function_name(), - fit=gen.fit(), + fit=gen.fit(9999), sample=", ".join(map(repr, gen.generate_data(limit))) ) From 15bcb3390ae5d4358fcac586f57ab1e04dc5324c Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 9 Apr 2025 12:53:48 +0100 Subject: [PATCH 45/85] Mimesis string generators gain fit based on length. 'compare' reports values each generator extracts from the database. 
--- sqlsynthgen/generators.py | 29 +++++++++++++++----------- sqlsynthgen/interactive.py | 42 ++++++++++++++++++++++++++++++-------- 2 files changed, 50 insertions(+), 21 deletions(-) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 2e4cc9c9..3cde6ead 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -13,6 +13,10 @@ from sqlsynthgen.base import DistributionGenerator +# How many distinct values can we have before we consider a +# choice distribution to be infeasible? +MAXIMUM_CHOICES = 500 + dist_gen = DistributionGenerator() generic = mimesis.Generic(locale=mimesis.locales.Locale.EN_GB) @@ -68,6 +72,9 @@ def custom_queries(self) -> dict[str, str]: For example {"myquery", "SELECT one, too AS two FROM mytable WHERE too > 1"} will populate SRC_STATS["myquery"]["one"] and SRC_STATS["myquery"]["two"] in the src-stats.yaml file. + + Keys should be chosen to minimize the chances of clashing with other queries, + for example "auto__{table}__{column}__{queryname}" """ return {} @@ -232,14 +239,13 @@ def get_generators(self, column: Column, engine: Engine): if not isinstance(column.type.as_generic(), String): return [] try: - with engine.connect() as connection: - buckets = Buckets.make_buckets( - connection, - column.table.name, - f"LENGTH({column.name})", - ) + buckets = Buckets.make_buckets( + engine, + column.table.name, + f"LENGTH({column.name})", + ) fitness_fn = len - except Exception: + except Exception as exc: # Some column types that appear to be strings (such as enums) # cannot have their lengths measured. In this case we cannot # detect fitness using lengths. @@ -459,18 +465,17 @@ class ChoiceGeneratorFactory(GeneratorFactory): """ def get_generators(self, column, engine: Engine): ct = column.type.as_generic() - if not isinstance(ct, Numeric) and not isinstance(ct, Integer): - return [] column_name = column.name table_name = column.table.name with engine.connect() as connection: results = connection.execute( - text("SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( + text("SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC LIMIT {limit}".format( table=table_name, column=column_name, + limit=MAXIMUM_CHOICES+1, )) ) - if results is None: + if results is None or MAXIMUM_CHOICES < results.rowcount: return [] values = [] # The values found counts = [] # The number or each value @@ -484,7 +489,7 @@ def get_generators(self, column, engine: Engine): if type(v) is decimal.Decimal: v = float(v) values.append(v) - if not counts or 500 < len(counts): + if not counts: return [] return [ ZipfChoiceGenerator(table_name, column_name, values, counts), diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index f0b89ff8..5fc6b70f 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -4,8 +4,8 @@ from dataclasses import dataclass from enum import Enum import logging - from prettytable import PrettyTable +import re from sqlalchemy import Column, MetaData, Table, text from sqlsynthgen.generators import everything_factory, Generator @@ -609,9 +609,10 @@ def do_compare(self, arg: str): if 0 < n and n <= len(gens): gen = gens[n - 1] comparison[f"{n}. {gen.function_name()}"] = gen.generate_data(limit) + kwa = gen.actual_kwargs() sacs = gen.select_aggregate_clauses() cqs = gen.custom_queries() - if not sacs and cqs: + if not sacs and not cqs: self.print( "{0}. 
{1} requires no data from the source database.", n, @@ -623,17 +624,38 @@ def do_compare(self, arg: str): n, gen.function_name(), ) - kwa = gen.actual_kwargs() - kwn = gen.nominal_kwargs() if sacs: clauses = [ f"{q} AS {n}" - for n, q in sacs + for n, q in sacs.items() ] + vals = [] + src_stat2kwarg = { v: k for k, v in gen.nominal_kwargs().items() } + for n in sacs.keys(): + src_stat = f'SRC_STATS["auto__{table_name}"]["{n}"]' + if src_stat in src_stat2kwarg: + ak = src_stat2kwarg[src_stat] + if ak in kwa: + vals.append(kwa[ak]) + else: + vals.append("(actual_kwargs() does not report)") + else: + vals += "(unused)" select_q = f"SELECT {', '.join(clauses)} FROM {table_name}" - "..." + self.print("{0}; providing the following values: {1}", select_q, vals) if cqs: - "..." + cq_key2args = {} + src_stat_re = re.compile(f'SRC_STATS\\["([^"]+)"\\]\\["([^"]+)"\\]') + for argname, src_stat in gen.nominal_kwargs().items(): + if argname in kwa: + src_stat_groups = src_stat_re.match(src_stat) + if src_stat_groups: + cq_key = src_stat_groups.group(1) + if cq_key not in cq_key2args: + cq_key2args[cq_key] = [] + cq_key2args[cq_key].append(kwa[argname]) + for cq_key, cq in cqs.items(): + self.print("{0}; providing the following values: {1}", cq, cq_key2args[cq_key]) self.print_table_by_columns(comparison) def get_column_data(self, count: int, to_str=repr, min_length: int = 0): @@ -668,11 +690,13 @@ def do_propose(self, arg): sample = self.get_column_data(limit) self.print("Sample of actual source data: {0}...", ",".join(sample)) for index, gen in enumerate(gens): + fit = gen.fit() + fit_s = "(no fit)" if fit is None else f"(fit: {fit:.0f})" self.print( - "{index}. {name}: (fit: {fit:.0f}) {sample} ...", + "{index}. {name}: {fit} {sample} ...", index=index + 1, name=gen.function_name(), - fit=gen.fit(9999), + fit=fit_s, sample=", ".join(map(repr, gen.generate_data(limit))) ) From c19e1204cdc1b42ba1f64cd5691e62fa729870b0 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 9 Apr 2025 13:25:39 +0100 Subject: [PATCH 46/85] refactored do_compare --- sqlsynthgen/interactive.py | 111 ++++++++++++++++++++++--------------- 1 file changed, 66 insertions(+), 45 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 5fc6b70f..c34df938 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -609,54 +609,75 @@ def do_compare(self, arg: str): if 0 < n and n <= len(gens): gen = gens[n - 1] comparison[f"{n}. {gen.function_name()}"] = gen.generate_data(limit) - kwa = gen.actual_kwargs() - sacs = gen.select_aggregate_clauses() - cqs = gen.custom_queries() - if not sacs and not cqs: - self.print( - "{0}. {1} requires no data from the source database.", - n, - gen.function_name(), - ) - else: - self.print( - "{0}. {1} requires the following data from the source database:", - n, - gen.function_name(), - ) - if sacs: - clauses = [ + self.print_values_queried(table_name, n, gen) + self.print_table_by_columns(comparison) + + def print_values_queried(self, table_name: str, n: int, gen: Generator): + """ + Print the values queried from the database for this generator. + """ + if not gen.select_aggregate_clauses() and not gen.custom_queries(): + self.print( + "{0}. {1} requires no data from the source database.", + n, + gen.function_name(), + ) + else: + self.print( + "{0}. 
{1} requires the following data from the source database:", + n, + gen.function_name(), + ) + self.print_select_aggregate_query(table_name, gen) + self.print_custom_queries(gen) + + def print_custom_queries(self, gen: Generator) -> None: + """ + Print all the custom queries and all the values they get in this case. + """ + cqs = gen.custom_queries() + if not cqs: + return + kwa = gen.actual_kwargs() + cq_key2args = {} + src_stat_re = re.compile(f'SRC_STATS\\["([^"]+)"\\]\\["([^"]+)"\\]') + for argname, src_stat in gen.nominal_kwargs().items(): + if argname in kwa: + src_stat_groups = src_stat_re.match(src_stat) + if src_stat_groups: + cq_key = src_stat_groups.group(1) + if cq_key not in cq_key2args: + cq_key2args[cq_key] = [] + cq_key2args[cq_key].append(kwa[argname]) + for cq_key, cq in cqs.items(): + self.print("{0}; providing the following values: {1}", cq, cq_key2args[cq_key]) + + def print_select_aggregate_query(self, table_name, gen) -> None: + """ + Prints the select aggregate query and all the values it gets in this case. + """ + sacs = gen.select_aggregate_clauses() + if not sacs: + return + kwa = gen.actual_kwargs() + clauses = [ f"{q} AS {n}" for n, q in sacs.items() ] - vals = [] - src_stat2kwarg = { v: k for k, v in gen.nominal_kwargs().items() } - for n in sacs.keys(): - src_stat = f'SRC_STATS["auto__{table_name}"]["{n}"]' - if src_stat in src_stat2kwarg: - ak = src_stat2kwarg[src_stat] - if ak in kwa: - vals.append(kwa[ak]) - else: - vals.append("(actual_kwargs() does not report)") - else: - vals += "(unused)" - select_q = f"SELECT {', '.join(clauses)} FROM {table_name}" - self.print("{0}; providing the following values: {1}", select_q, vals) - if cqs: - cq_key2args = {} - src_stat_re = re.compile(f'SRC_STATS\\["([^"]+)"\\]\\["([^"]+)"\\]') - for argname, src_stat in gen.nominal_kwargs().items(): - if argname in kwa: - src_stat_groups = src_stat_re.match(src_stat) - if src_stat_groups: - cq_key = src_stat_groups.group(1) - if cq_key not in cq_key2args: - cq_key2args[cq_key] = [] - cq_key2args[cq_key].append(kwa[argname]) - for cq_key, cq in cqs.items(): - self.print("{0}; providing the following values: {1}", cq, cq_key2args[cq_key]) - self.print_table_by_columns(comparison) + vals = [] + src_stat2kwarg = { v: k for k, v in gen.nominal_kwargs().items() } + for n in sacs.keys(): + src_stat = f'SRC_STATS["auto__{table_name}"]["{n}"]' + if src_stat in src_stat2kwarg: + ak = src_stat2kwarg[src_stat] + if ak in kwa: + vals.append(kwa[ak]) + else: + vals.append("(actual_kwargs() does not report)") + else: + vals += "(unused)" + select_q = f"SELECT {', '.join(clauses)} FROM {table_name}" + self.print("{0}; providing the following values: {1}", select_q, vals) def get_column_data(self, count: int, to_str=repr, min_length: int = 0): column = self.get_column_name() From 8d0a54497a1f0a25bf0366199bc0039a7739adb3 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 10 Apr 2025 19:08:56 +0100 Subject: [PATCH 47/85] Initial outputting of config.yaml --- sqlsynthgen/generators.py | 77 ++++++++++++++++++++ sqlsynthgen/interactive.py | 142 +++++++++++++++++++++++++++---------- 2 files changed, 181 insertions(+), 38 deletions(-) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 3cde6ead..72d51803 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -3,16 +3,21 @@ """ from abc import ABC, abstractmethod +from collections.abc import Mapping import decimal +import logging import math import mimesis import mimesis.locales +import re from sqlalchemy import 
Column, Engine, text from sqlalchemy.types import Integer, Numeric, String from typing import Callable from sqlsynthgen.base import DistributionGenerator +logger = logging.getLogger(__name__) + # How many distinct values can we have before we consider a # choice distribution to be infeasible? MAXIMUM_CHOICES = 500 @@ -100,6 +105,78 @@ def fit(self, default=None) -> float | None: return default +class PredefinedGenerator(Generator): + """ + Generator built from an existing config.yaml. + """ + SELECT_AGGREGATE_RE = re.compile(r"SELECT (.*) FROM ([A-Za-z_][A-Za-z0-9_]*)") + AS_CLAUSE = re.compile(r" *(.+) +AS +([A-Za-z_][A-Za-z0-9_]*) *") + SRC_STAT_NAME = re.compile(r"SRC_STATS\[([^]]*)\].*") + + def __init__(self, table_name: str, generator_object: Mapping[str, any], config: Mapping[str, any]): + """ + Initialise a generator from a config.yaml. + :param config: The entire configuration. + :param generator_object: The part of the configuration at tables.*.row_generators + """ + self._table_name = table_name + self._name: str = generator_object["name"] + self._kwn: dict[str, str] = generator_object.get("kwargs", {}) + self._src_stats_mentioned = set() + for kwnv in self._kwn.values(): + ss = self.SRC_STAT_NAME.match(kwnv) + if ss: + self._src_stats_mentioned.add(ss.group(1)) + # Need to deal with this somehow (or remove it from the schema) + self._argn: list[str] = generator_object.get("args", []) + self._select_aggregate_clauses = {} + self._custom_queries = {} + tables = config.get("tables", {}) + for sstat in config.get("src-stats", []): + name: str = sstat["name"] + dpq = sstat.get("dp-query", None) + query = sstat.get("query", dpq) #... should not combine these probably? + if query and name.startswith("auto__"): + qname = name[6:] + sam = None if query is None else self.SELECT_AGGREGATE_RE.match(query) + if sam and qname in tables and qname == sam.group(2): + sacs = [ + self.AS_CLAUSE.match(clause) + for clause in sam.group(1).split(',') + ] + self._select_aggregate_clauses = { + sac.group(2): sac.group(1) + for sac in sacs + if sac is not None + } + elif name in self._src_stats_mentioned: + self._custom_queries[name] = query + + def function_name(self) -> str: + return self._name + + def nominal_kwargs(self) -> dict[str, str]: + return self._kwn + + def select_aggregate_clauses(self) -> dict[str, str]: + return self._select_aggregate_clauses + + def custom_queries(self) -> dict[str, str]: + return self._custom_queries + + def actual_kwargs(self) -> dict[str, any]: + # Run the queries from nominal_kwargs + #... + logger.error("PredefinedGenerator.actual_kwargs not implemented yet") + return {} + + def generate_data(self, count) -> list[any]: + # Call the function if we can. This could be tricky... + #... + logger.error("PredefinedGenerator.generate_data not implemented yet") + return [] + + class GeneratorFactory(ABC): """ A factory for making generators appropriate for a database column. 
diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index c34df938..d0b39609 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -8,7 +8,7 @@ import re from sqlalchemy import Column, MetaData, Table, text -from sqlsynthgen.generators import everything_factory, Generator +from sqlsynthgen.generators import everything_factory, Generator, PredefinedGenerator from sqlsynthgen.utils import create_db_engine logger = logging.getLogger(__name__) @@ -35,7 +35,7 @@ class TableType(Enum): @dataclass class TableEntry: - name: str + name: str # name of the table class AskSaveCmd(cmd.Cmd): @@ -144,7 +144,6 @@ def make_table_entry(self, name: str) -> TableEntry: def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): super().__init__(src_dsn, src_schema, metadata, config) - self.config = config self.set_prompt() def set_prompt(self): @@ -157,7 +156,7 @@ def set_type(self, t_type: TableType): if self.table_index < len(self.table_entries): entry = self.table_entries[self.table_index] entry.new_type = t_type - def copy_entries(self) -> None: + def _copy_entries(self) -> None: tables = self.config.get("tables", {}) for entry in self.table_entries: if entry.old_type != entry.new_type: @@ -198,7 +197,7 @@ def do_quit(self, _arg): return True reply = self.ask_save() if reply == "yes": - self.copy_entries() + self._copy_entries() return True if reply == "no": return True @@ -268,13 +267,13 @@ def do_data(self, arg: str): arg_index = 0 min_length = 0 table_metadata = self.table_metadata() - if arg_index < len(args) and args[arg_index].isnumeric(): + if arg_index < len(args) and args[arg_index].isdigit(): number = int(args[arg_index]) arg_index += 1 if arg_index < len(args) and args[arg_index] in table_metadata.columns: column = args[arg_index] arg_index += 1 - if arg_index < len(args) and args[arg_index].isnumeric(): + if arg_index < len(args) and args[arg_index].isdigit(): min_length = int(args[arg_index]) arg_index += 1 if arg_index != len(args): @@ -344,8 +343,8 @@ def update_config_tables(src_dsn: str, src_schema: str, metadata: MetaData, conf class GeneratorInfo: column: str is_primary_key: bool - old_name: str | None - new_name: str | None + old_gen: Generator | None + new_gen: Generator | None @dataclass class GeneratorCmdTableEntry(TableEntry): @@ -358,7 +357,7 @@ class GeneratorCmd(DbCmd): def make_table_entry(self, name: str) -> TableEntry: tables = self.config.get("tables", {}) - table = tables.get(name, {}) + table: str = tables.get(name, {}) metadata_table = self.metadata.tables[name] columns = set(metadata_table.columns.keys()) generator_infos: list[GeneratorInfo] = [] @@ -382,11 +381,12 @@ def make_table_entry(self, name: str) -> TableEntry: if len(ca) == 1: single_ca = ca if single_ca is not None: + gen = PredefinedGenerator(table, rg, self.config) generator_infos.append(GeneratorInfo( column=single_ca, is_primary_key=metadata_table.columns[single_ca].primary_key, - old_name=gen_name, - new_name=gen_name, + old_gen=gen, + new_gen=gen, )) else: multiple_columns_assigned[gen_name] = ca @@ -394,8 +394,8 @@ def make_table_entry(self, name: str) -> TableEntry: generator_infos.append(GeneratorInfo( column=col, is_primary_key=metadata_table.columns[col].primary_key, - old_name=None, - new_name=None, + old_gen=None, + new_gen=None, )) if multiple_columns_assigned: self.print( @@ -470,11 +470,11 @@ def set_prompt(self): column = f"{gen_info.column}[pk]" else: column = gen_info.column - if gen_info.new_name: + if gen_info.new_gen: 
self.prompt = "({table}.{column} ({generator})) ".format( table=table_name, column=column, - generator=gen_info.new_name, + generator=gen_info.new_gen.function_name(), ) else: self.prompt = "({table}.{column}) ".format( @@ -488,38 +488,72 @@ def set_generator(self, generator: str): if self.generator_index < len(entry.generators): entry.generators[self.generator_index] = generator - def copy_entries(self) -> None: + def _remove_auto_src_stats(self) -> list[dict[str, any]]: + src_stats = self.config.get("src-stats", {}) + new_src_stats = [] + for stat in src_stats: + if not stat.get("name", "").startswith("auto__"): + new_src_stats.append(stat) + self.config["src-stats"] = new_src_stats + return new_src_stats + + def _copy_entries(self) -> None: + src_stats = self._remove_auto_src_stats() tables = self.config.get("tables", {}) - for entry in self.table_entries: - # We probably need to reconstruct row_generators. Hmmm. - # We will need to keep row_generators intact not break them apart like now + tes: list[GeneratorCmdTableEntry] = self.table_entries + for entry in tes: + rgs = [] for generator in entry.generators: - pass + if generator.new_gen is not None: + sacs = generator.new_gen.select_aggregate_clauses() + if sacs: + src_stats[f"auto__{entry.name}"] = self._get_aggregate_query(generator.new_gen, entry.name) + cqs = generator.new_gen.custom_queries() + for cq_key, cq in cqs.items(): + src_stats.append({ + "name": cq_key, + "query": cq, + }) + rg = { + "name": generator.new_gen.function_name(), + } + kwn = generator.new_gen.nominal_kwargs() + if kwn: + rg["kwargs"] = kwn + rgs.append(rg) + if entry.name not in tables: + tables[entry.name] = {} + if rgs: + tables[entry.name]["row_generators"] = rgs + elif "row_generators" in tables[entry.name]: + del tables[entry.name]["row_generators"] self.config["tables"] = tables + self.config["src-stats"] = src_stats def do_quit(self, _arg): "Check the updates, save them if desired and quit the configurer." 
count = 0 for entry in self.table_entries: header_shown = False - for gen in entry.generators: - if gen.old_name != gen.new_name: + g_entry: GeneratorCmdTableEntry = entry + for gen in g_entry.generators: + if gen.old_gen != gen.new_gen: if not header_shown: header_shown = True self.print("Table {0}:", entry.name) count += 1 self.print( "...changing {0} from {1} to {2}", - gen.name, - gen.old_name, - gen.new_name, + gen.column, + gen.old_gen.function_name() if gen.old_gen else "nothing", + gen.new_gen.function_name() if gen.new_gen else "nothing", ) if count == 0: self.print("There are no changes.") return True reply = self.ask_save() if reply == "yes": - self.copy_entries() + self._copy_entries() return True if reply == "no": return True @@ -538,15 +572,15 @@ def do_list(self, arg): self.print("Error: no table {0}", self.table_index) return for gen in self.table_entries[self.table_index].generators: - old = "" if gen.old_name is None else gen.old_name - if gen.old_name == gen.new_name: + old = "" if gen.old_gen is None else gen.old_gen.function_name() + if gen.old_gen == gen.new_gen: becomes = "" if old == "": old = "(not set)" - elif gen.new_name is None: + elif gen.new_gen is None: becomes = "(delete)" else: - becomes = f"->{gen.new_name}" + becomes = f"->{gen.new_gen.function_name()}" primary = "[primary-key]" if gen.is_primary_key else "" self.print("{0}{1}{2} {3}", old, becomes, primary, gen.column) @@ -575,7 +609,7 @@ def do_previous(self, _arg): self.set_prompt() def get_generator_proposals(self) -> list[Generator]: - if (self.table_index, self.generator_index) != self.generators_valid_indices: + if self.generators_valid_indices != (self.table_index, self.generator_index): self.generators = None if self.generators is None: column = self.column_metadata() @@ -585,6 +619,7 @@ def get_generator_proposals(self) -> list[Generator]: gens = everything_factory.get_generators(column, self.engine) gens.sort(key=lambda g: g.fit(9999)) self.generators = gens + self.generators_valid_indices = (self.table_index, self.generator_index) return self.generators def do_compare(self, arg: str): @@ -604,7 +639,7 @@ def do_compare(self, arg: str): gens: list[Generator] = self.get_generator_proposals() table_name = self.table_name() for argument in args: - if argument.isnumeric(): + if argument.isdigit(): n = int(argument) if 0 < n and n <= len(gens): gen = gens[n - 1] @@ -652,6 +687,16 @@ def print_custom_queries(self, gen: Generator) -> None: for cq_key, cq in cqs.items(): self.print("{0}; providing the following values: {1}", cq, cq_key2args[cq_key]) + def _get_aggregate_query(self, gen: Generator, table_name: str) -> str | None: + sacs = gen.select_aggregate_clauses() + if not sacs: + return None + clauses = [ + f"{q} AS {n}" + for n, q in sacs.items() + ] + return f"SELECT {', '.join(clauses)} FROM {table_name}" + def print_select_aggregate_query(self, table_name, gen) -> None: """ Prints the select aggregate query and all the values it gets in this case. 
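For reference, a small sketch of the query that _get_aggregate_query assembles from a generator's select_aggregate_clauses(); the clause names mirror the dist_gen.normal case for a frequency column on the string table and are illustrative only:

    # Clauses as a generator might report them (output name -> aggregate expression).
    sacs = {
        "mean__frequency": "AVG(frequency)",
        "stddev__frequency": "STDDEV(frequency)",
    }
    clauses = [f"{q} AS {n}" for n, q in sacs.items()]
    query = f"SELECT {', '.join(clauses)} FROM string"
    # -> SELECT AVG(frequency) AS mean__frequency, STDDEV(frequency) AS stddev__frequency FROM string

This is the same string that ends up in the auto__ src-stats block when _copy_entries saves the configuration.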
@@ -660,10 +705,6 @@ def print_select_aggregate_query(self, table_name, gen) -> None: if not sacs: return kwa = gen.actual_kwargs() - clauses = [ - f"{q} AS {n}" - for n, q in sacs.items() - ] vals = [] src_stat2kwarg = { v: k for k, v in gen.nominal_kwargs().items() } for n in sacs.keys(): @@ -676,7 +717,7 @@ def print_select_aggregate_query(self, table_name, gen) -> None: vals.append("(actual_kwargs() does not report)") else: vals += "(unused)" - select_q = f"SELECT {', '.join(clauses)} FROM {table_name}" + select_q = self._get_aggregate_query(gen, table_name) self.print("{0}; providing the following values: {1}", select_q, vals) def get_column_data(self, count: int, to_str=repr, min_length: int = 0): @@ -721,6 +762,31 @@ def do_propose(self, arg): sample=", ".join(map(repr, gen.generate_data(limit))) ) + def do_set(self, arg: str): + """ + Set one of the proposals as a generator. + Takes a single integer argument. + """ + if not arg.isdigit(): + self.print("set requires a single integer argument; 'set 3' sets the third generator that 'propose' lists.") + return + gens = self.get_generator_proposals() + index = int(arg) + if index < 1: + self.print("set's argument must be at least 1") + return + if len(gens) <= index: + self.print("There are currently only {0} generators proposed, please select one of them.") + return + (table, gen_info) = self.get_table_and_generator() + if table is None: + self.print("Error: no table") + return + if gen_info is None: + self.print("Error: no column") + return + gen_info.new_gen = gens[index - 1] + def update_config_generators(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): with GeneratorCmd(src_dsn, src_schema, metadata, config) as gc: From 428adec028cb7bb5243e1542560ab502a231d1aa Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 11 Apr 2025 11:46:05 +0100 Subject: [PATCH 48/85] Fix configure-generators output --- config-old.yaml | 56 ++++++++++++++++++++++++++++++++++++++ sqlsynthgen/base.py | 2 +- sqlsynthgen/interactive.py | 19 +++++++++---- 3 files changed, 71 insertions(+), 6 deletions(-) create mode 100644 config-old.yaml diff --git a/config-old.yaml b/config-old.yaml new file mode 100644 index 00000000..4a028e55 --- /dev/null +++ b/config-old.yaml @@ -0,0 +1,56 @@ +story_generators: +- name: ssg_pagila.rental_story + num_stories_per_pass: 30 +story_generators_module: ssg_pagila +tables: + actor: + row_generators: + - columns_assigned: first_name + name: generic.person.first_name + - columns_assigned: last_name + name: generic.person.last_name + address: + num_rows_per_pass: 1 + row_generators: + - columns_assigned: fulltext + name: generic.text.text + vocabulary_columns: + - rating + - rental_duration + - rental_rate + - replacement_cost + category: + vocabulary_table: true + city: + vocabulary_table: true + country: + vocabulary_table: true + customer: + row_generators: + - columns_assigned: email + name: generic.person.email + - columns_assigned: first_name + name: generic.person.first_name + - columns_assigned: last_name + name: generic.person.last_name + vocabulary_columns: + - active + language: + vocabulary_table: true + payment: + vocabulary_columns: + - amount + payment_p2022_01: + ignore: true + payment_p2022_02: + ignore: true + payment_p2022_03: + ignore: true + payment_p2022_04: + ignore: true + payment_p2022_05: + ignore: true + payment_p2022_06: + ignore: true + payment_p2022_07: + ignore: true diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index eed59019..56fc8fc7 100644 --- a/sqlsynthgen/base.py +++ 
b/sqlsynthgen/base.py @@ -51,7 +51,7 @@ def choice(self, a): return self.rng.choice(a).item() def zipf_choice(self, a, n): - return self.rng.choice(a, p = zipf_weights(n)).item() + return self.rng.choice(a, p = zipf_weights(n)) class TableGenerator(ABC): diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index d0b39609..5c8f6a4c 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -359,7 +359,7 @@ def make_table_entry(self, name: str) -> TableEntry: tables = self.config.get("tables", {}) table: str = tables.get(name, {}) metadata_table = self.metadata.tables[name] - columns = set(metadata_table.columns.keys()) + columns = {str(col_name) for col_name in metadata_table.columns.keys()} generator_infos: list[GeneratorInfo] = [] multiple_columns_assigned: dict[str, list[str]] = {} for rg in table.get("row_generators", []): @@ -379,7 +379,7 @@ def make_table_entry(self, name: str) -> TableEntry: else: columns.difference_update(ca) if len(ca) == 1: - single_ca = ca + single_ca = str(ca) if single_ca is not None: gen = PredefinedGenerator(table, rg, self.config) generator_infos.append(GeneratorInfo( @@ -507,7 +507,10 @@ def _copy_entries(self) -> None: if generator.new_gen is not None: sacs = generator.new_gen.select_aggregate_clauses() if sacs: - src_stats[f"auto__{entry.name}"] = self._get_aggregate_query(generator.new_gen, entry.name) + src_stats.append({ + "name": f"auto__{entry.name}", + "query": self._get_aggregate_query(generator.new_gen, entry.name), + }) cqs = generator.new_gen.custom_queries() for cq_key, cq in cqs.items(): src_stats.append({ @@ -516,6 +519,7 @@ def _copy_entries(self) -> None: }) rg = { "name": generator.new_gen.function_name(), + "columns_assigned": generator.column, } kwn = generator.new_gen.nominal_kwargs() if kwn: @@ -721,7 +725,7 @@ def print_select_aggregate_query(self, table_name, gen) -> None: self.print("{0}; providing the following values: {1}", select_q, vals) def get_column_data(self, count: int, to_str=repr, min_length: int = 0): - column = self.get_column_name() + column = str(self.get_column_name()) where = "" if 0 < min_length: where = "WHERE LENGTH({column}) >= {len}".format( @@ -753,7 +757,12 @@ def do_propose(self, arg): self.print("Sample of actual source data: {0}...", ",".join(sample)) for index, gen in enumerate(gens): fit = gen.fit() - fit_s = "(no fit)" if fit is None else f"(fit: {fit:.0f})" + if fit is None: + fit_s = "(no fit)" + elif fit < 100: + fit_s = f"(fit: {fit:.3g})" + else: + fit_s = f"(fit: {fit:.0f})" self.print( "{index}. 
{name}: {fit} {sample} ...", index=index + 1, From 8738abd26a5e7094bc447200fb61a92b8247dfe3 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 11 Apr 2025 15:11:04 +0100 Subject: [PATCH 49/85] Mimesis DateTime generators --- config-old.yaml | 56 ----------------- sqlsynthgen/base.py | 2 +- sqlsynthgen/generators.py | 121 ++++++++++++++++++++++++++++++++----- sqlsynthgen/interactive.py | 14 ++--- 4 files changed, 114 insertions(+), 79 deletions(-) delete mode 100644 config-old.yaml diff --git a/config-old.yaml b/config-old.yaml deleted file mode 100644 index 4a028e55..00000000 --- a/config-old.yaml +++ /dev/null @@ -1,56 +0,0 @@ -story_generators: -- name: ssg_pagila.rental_story - num_stories_per_pass: 30 -story_generators_module: ssg_pagila -tables: - actor: - row_generators: - - columns_assigned: first_name - name: generic.person.first_name - - columns_assigned: last_name - name: generic.person.last_name - address: - num_rows_per_pass: 1 - row_generators: - - columns_assigned: fulltext - name: generic.text.text - vocabulary_columns: - - rating - - rental_duration - - rental_rate - - replacement_cost - category: - vocabulary_table: true - city: - vocabulary_table: true - country: - vocabulary_table: true - customer: - row_generators: - - columns_assigned: email - name: generic.person.email - - columns_assigned: first_name - name: generic.person.first_name - - columns_assigned: last_name - name: generic.person.last_name - vocabulary_columns: - - active - language: - vocabulary_table: true - payment: - vocabulary_columns: - - amount - payment_p2022_01: - ignore: true - payment_p2022_02: - ignore: true - payment_p2022_03: - ignore: true - payment_p2022_04: - ignore: true - payment_p2022_05: - ignore: true - payment_p2022_06: - ignore: true - payment_p2022_07: - ignore: true diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index 56fc8fc7..470b34fe 100644 --- a/sqlsynthgen/base.py +++ b/sqlsynthgen/base.py @@ -48,7 +48,7 @@ def normal(self, mean: float, sd: float) -> float: return self.rng.normal(loc=mean, scale=sd) def choice(self, a): - return self.rng.choice(a).item() + return self.rng.choice(a) def zipf_choice(self, a, n): return self.rng.choice(a, p = zipf_weights(n)) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 72d51803..4b827133 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -11,7 +11,7 @@ import mimesis.locales import re from sqlalchemy import Column, Engine, text -from sqlalchemy.types import Integer, Numeric, String +from sqlalchemy.types import Date, DateTime, Integer, Numeric, String, Time from typing import Callable from sqlsynthgen.base import DistributionGenerator @@ -257,21 +257,15 @@ def get_generators(self, column: Column, engine: Engine) -> list[Generator]: ] -class MimesisGenerator(Generator): +class MimesisGeneratorBase(Generator): def __init__( self, function_name: str, - value_fn: Callable[[any], float] | None=None, - buckets: Buckets | None=None, ): """ Generator from Mimesis. - :param: function_name is relative to 'generic', for example 'person.name'. - :param: value_fn Function to convert generator output to floats, if needed. The values - thus produced are compared against the buckets to estimate the fit. - :param: buckets The distribution of string lengths in the real data. If this is None - then the fit method will return None. + :param function_name: is relative to 'generic', for example 'person.name'. 
""" super().__init__() f = generic @@ -281,8 +275,34 @@ def __init__( f = getattr(f, part) if not callable(f): raise Exception(f"Mimesis object {function_name} is not a callable, so cannot be used as a generator") - self.name = "generic." + function_name - self.generator_function = f + self._name = "generic." + function_name + self._generator_function = f + def function_name(self): + return self._name + def generate_data(self, count): + return [ + self._generator_function() + for _ in range(count) + ] + + +class MimesisGenerator(MimesisGeneratorBase): + def __init__( + self, + function_name: str, + value_fn: Callable[[any], float] | None=None, + buckets: Buckets | None=None, + ): + """ + Generator from Mimesis. + + :param function_name: is relative to 'generic', for example 'person.name'. + :param value_fn: Function to convert generator output to floats, if needed. The values + thus produced are compared against the buckets to estimate the fit. + :param buckets: The distribution of string lengths in the real data. If this is None + then the fit method will return None. + """ + super().__init__(function_name) if buckets is None: self._fit = None return @@ -294,18 +314,51 @@ def __init__( ] self._fit = buckets.fit_from_values(samples) def function_name(self): - return self.name + return self._name def nominal_kwargs(self): return {} def actual_kwargs(self): return {} + def fit(self, default=None): + return default if self._fit is None else self._fit + + +class MimesisDateTimeGenerator(MimesisGeneratorBase): + def __init__(self, column: Column, engine: Engine, function_name: str): + super().__init__(function_name) + self._column = column + self._function_name = function_name + self._extract_year = f"EXTRACT(YEAR FROM {column.name})" + self._max_year = f"MAX({self._extract_year})" + self._min_year = f"MIN({self._extract_year})" + with engine.connect() as connection: + result = connection.execute( + text(f"SELECT {self._min_year} AS start, {self._max_year} AS end FROM {column.table.name}") + ).first() + if result is None: + return None + self._start = result.start + self._end = result.end + def nominal_kwargs(self): + return { + "start": f'SRC_STATS["auto__{self._column.table.name}"]["{self._column.name}__start"]', + "end": f'SRC_STATS["auto__{self._column.table.name}"]["{self._column.name}__end"]', + } + def actual_kwargs(self): + return { + "start": self._start, + "end": self._end, + } + def select_aggregate_clauses(self) -> dict[str, str]: + return { + f"{self._column.name}__start": self._min_year, + f"{self._column.name}__end": self._max_year, + } def generate_data(self, count): return [ - self.generator_function() + self._generator_function(start=self._start, end=self._end) for _ in range(count) ] - def fit(self, default=None): - return default if self._fit is None else self._fit class MimesisStringGeneratorFactory(GeneratorFactory): @@ -362,6 +415,7 @@ def get_generators(self, column: Column, engine: Engine): "text.word", ])) + class MimesisFloatGeneratorFactory(GeneratorFactory): """ All Mimesis generators that return floating point numbers. @@ -373,6 +427,40 @@ def get_generators(self, column: Column, _engine: Engine): "person.height", ])) + +class MimesisDateGeneratorFactory(GeneratorFactory): + """ + All Mimesis generators that return dates. 
+ """ + def get_generators(self, column: Column, engine: Engine): + ct = column.type.as_generic() + if not isinstance(ct, Date): + return [] + return [MimesisDateTimeGenerator(column, engine, "datetime.date")] + + +class MimesisDateTimeGeneratorFactory(GeneratorFactory): + """ + All Mimesis generators that return datetimes. + """ + def get_generators(self, column: Column, engine: Engine): + ct = column.type.as_generic() + if not isinstance(ct, DateTime): + return [] + return [MimesisDateTimeGenerator(column, engine, "datetime.datetime")] + + +class MimesisTimeGeneratorFactory(GeneratorFactory): + """ + All Mimesis generators that return times. + """ + def get_generators(self, column: Column, _engine: Engine): + ct = column.type.as_generic() + if not isinstance(ct, Time): + return [] + return [MimesisGenerator("datetime.time")] + + class MimesisIntegerGeneratorFactory(GeneratorFactory): """ All Mimesis generators that return integers. @@ -578,6 +666,9 @@ def get_generators(self, column, engine: Engine): MimesisStringGeneratorFactory(), MimesisIntegerGeneratorFactory(), MimesisFloatGeneratorFactory(), + MimesisDateGeneratorFactory(), + MimesisDateTimeGeneratorFactory(), + MimesisTimeGeneratorFactory(), ContinuousDistributionGeneratorFactory(), ChoiceGeneratorFactory(), ]) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 5c8f6a4c..56b2bb9f 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -379,7 +379,7 @@ def make_table_entry(self, name: str) -> TableEntry: else: columns.difference_update(ca) if len(ca) == 1: - single_ca = str(ca) + single_ca = str(ca[0]) if single_ca is not None: gen = PredefinedGenerator(table, rg, self.config) generator_infos.append(GeneratorInfo( @@ -519,7 +519,7 @@ def _copy_entries(self) -> None: }) rg = { "name": generator.new_gen.function_name(), - "columns_assigned": generator.column, + "columns_assigned": [generator.column], } kwn = generator.new_gen.nominal_kwargs() if kwn: @@ -701,7 +701,7 @@ def _get_aggregate_query(self, gen: Generator, table_name: str) -> str | None: ] return f"SELECT {', '.join(clauses)} FROM {table_name}" - def print_select_aggregate_query(self, table_name, gen) -> None: + def print_select_aggregate_query(self, table_name, gen: Generator) -> None: """ Prints the select aggregate query and all the values it gets in this case. 
""" @@ -718,9 +718,9 @@ def print_select_aggregate_query(self, table_name, gen) -> None: if ak in kwa: vals.append(kwa[ak]) else: - vals.append("(actual_kwargs() does not report)") + logger.warning("actual_kwargs for %s does not report %s", gen.function_name(), ak) else: - vals += "(unused)" + logger.warning('nominal_kwargs for %s does not have a value SRC_STATS["auto__%s"]["%s"]', gen.function_name(), table_name, n) select_q = self._get_aggregate_query(gen, table_name) self.print("{0}; providing the following values: {1}", select_q, vals) @@ -784,8 +784,8 @@ def do_set(self, arg: str): if index < 1: self.print("set's argument must be at least 1") return - if len(gens) <= index: - self.print("There are currently only {0} generators proposed, please select one of them.") + if len(gens) < index: + self.print("There are currently only {0} generators proposed, please select one of them.", index) return (table, gen_info) = self.get_table_and_generator() if table is None: From 5b375ae9a0467ad67b89c7b72d733788a61809c5 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Mon, 14 Apr 2025 16:46:58 +0100 Subject: [PATCH 50/85] Automatic generators removed --- docs/source/introduction.rst | 1 - sqlsynthgen/interactive.py | 2 +- sqlsynthgen/json_schemas/config_schema.json | 8 - sqlsynthgen/make.py | 214 ++------------------ 4 files changed, 17 insertions(+), 208 deletions(-) diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst index a522b97c..a4a62be3 100644 --- a/docs/source/introduction.rst +++ b/docs/source/introduction.rst @@ -167,7 +167,6 @@ So we edit the appropriate parts of the ``config.yaml`` file. You will see seven num_rows_per_pass: 1 row_generators: [] unions: {} - vocabulary_columns: [] vocabulary_table: false We need to change ``ignore: false`` to ``ignore: true``, and we can delete the other lines in these blocks if we like: diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 56b2bb9f..29b0257e 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -489,7 +489,7 @@ def set_generator(self, generator: str): entry.generators[self.generator_index] = generator def _remove_auto_src_stats(self) -> list[dict[str, any]]: - src_stats = self.config.get("src-stats", {}) + src_stats = self.config.get("src-stats", []) new_src_stats = [] for stat in src_stats: if not stat.get("name", "").startswith("auto__"): diff --git a/sqlsynthgen/json_schemas/config_schema.json b/sqlsynthgen/json_schemas/config_schema.json index 77db08c2..5033fda4 100644 --- a/sqlsynthgen/json_schemas/config_schema.json +++ b/sqlsynthgen/json_schemas/config_schema.json @@ -202,14 +202,6 @@ } } } - }, - "vocabulary_columns": { - "description": "Columns whose set of possible values is not considered private and so can be reproduced in the output database.", - "type": "array", - "items": { - "description": "Column name.", - "type": "string" - } } } } diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 0a527df7..7430f69d 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -153,9 +153,6 @@ def make_column_choices( ) -> list[ColumnChoice]: # each union is a dict of union names to a list of its columns unions: dict[str, list[str]] = get_property(table_config, "unions", {}) - generic = src_stats.get("_sqlsynthgen_generic", {}) - table_stats = generic.get(table_name, {}) - column_generators = table_stats.get("column_generators", {}) # Set of all columns that are part of a union columns_in_union: set[str] = set() for (union_name, cols) in unions.items(): @@ -181,15 
+178,11 @@ def make_column_choices( elif 1 == n: u = intersection.pop() dependent_columns[u] = assigned_set - # table row count: - row_count: int | None = table_stats.get("row_count", None) - # some columns have counts of nonnull values: - value_count = { col: vs.get("count", None) for (col, vs) in column_generators.items() } # Now we can convert unions to ColumnChoices choices: list[ColumnChoice] = [] for cols in unions.values(): choices.append( - ColumnChoice.make(cols, dependent_columns, row_count, value_count) + ColumnChoice.make(cols, dependent_columns) ) return choices @@ -573,23 +566,9 @@ def _get_generator_for_table( row_gen_info_data, columns_covered = _get_row_generator(table_config) table_data.row_gens.extend(row_gen_info_data) - generic_generators = get_property(src_stats, "_sqlsynthgen_generic", {} - ).get(table.name, {}).get("column_generators", {}) for column in table.columns: if column.name not in columns_covered: - # No generator for this column in the user config. - # Perhaps there is something for us in src-stats.yaml's - # _sqlsynthgen_generic? - if column.name in generic_generators: - gen = generic_generators[column.name] - table_data.row_gens.append( - RowGeneratorInfo([column.name], FunctionCall( - gen["name"], - [f"{k}={v}" for (k, v) in gen.get("kwargs", {}).items()] - )) - ) - else: - table_data.row_gens.append(_get_default_generator(column)) + table_data.row_gens.append(_get_default_generator(column)) return table_data @@ -773,7 +752,6 @@ def generate_config_file( "unions": {}, "num_rows_per_pass": 1, "row_generators": [], - "vocabulary_columns": [], } tables[table_name] = table return yaml.dump({"tables": tables}) @@ -815,47 +793,6 @@ def reflect_if(table_name: str, _: Any) -> bool: return yaml.dump(meta_dict) -def zipf_distribution(total, bins): - basic_dist = list(map(lambda n: 1/n, range(1, bins + 1))) - bd_remaining = sum(basic_dist) - for b in basic_dist: - # yield b/bd_remaining of the `total` remaining - if bd_remaining == 0: - yield 0 - else: - x = math.floor(0.5 + total * b / bd_remaining) - bd_remaining -= x * bd_remaining / total - total -= x - yield x - - -def uniform_distribution(total, bins): - p = total // bins - n = total % bins - for i in range(0, n): - yield p + 1 - for i in range(n, bins): - yield p - - -def fit_error(test, actual): - return sum(map(lambda t, a: (t - a)*(t - a), test, actual)) - - -_CDF_BUCKETS = { - "dist_gen.normal": { - "buckets": [0.0227, 0.0441, 0.0918, 0.1499, 0.1915, 0.1915, 0.1499, 0.0918, 0.0441, 0.0227], - "kwarg_fn": lambda mean, sd: {"mean": mean, "sd": sd}, - }, - # Uniform wih mean 0 and sigma 1 runs between +/-sqrt(3) = +/-1.732 - # and has height 1 / 2sqrt(3) = 0.28868. 
- "dist_gen.uniform": { - "buckets": [0, 0.06698, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.14434, 0.06698, 0], - "kwarg_fn": lambda mean, sd: {"low": mean - sd * math.sqrt(3), "high": mean + sd * math.sqrt(3)}, - }, -} - - class DbConnection: def __init__(self, engine): self._engine = engine @@ -889,7 +826,7 @@ async def execute_query(self, query_block: Mapping[str, Any]) -> Any: """Execute query in query_block.""" logger.debug("Executing query %s", query_block["name"]) query = text(query_block["query"]) - raw_result = self.execute_raw_query(query) + raw_result = await self.execute_raw_query(query) if "dp-query" in query_block: result_df = pd.DataFrame(raw_result.mappings()) @@ -911,6 +848,18 @@ async def execute_query(self, query_block: Mapping[str, Any]) -> Any: return final_result +def fix_type(value): + if type(value) is decimal.Decimal: + return float(value) + return value + + +def fix_types(dics): + return [{ + k: fix_type(v) for k, v in dic.items() + } for dic in dics] + + async def make_src_stats( dsn: str, config: Mapping, metadata: MetaData, schema_name: Optional[str] = None ) -> dict[str, list[dict]]: @@ -939,7 +888,7 @@ async def make_src_stats_connection(config: Mapping, db_conn: DbConnection, meta *[db_conn.execute_query(query_block) for query_block in query_blocks] ) src_stats = { - query_block["name"]: result + query_block["name"]: fix_types(result) for query_block, result in zip(query_blocks, results) } @@ -947,135 +896,4 @@ async def make_src_stats_connection(config: Mapping, db_conn: DbConnection, meta if not result: logger.warning("src-stats query %s returned no results", name) - generic = {} - tables_config = config.get("tables", {}) - for table_name0, table in metadata.tables.items(): - table_name = str(table_name0) - row_count = await db_conn.table_row_count(table_name) - generic[table_name] = { - "row_count": row_count, - "column_generators": {}, - } - table_config = tables_config.get(table_name, None) - vocab_columns = set() if table_config is None else set(table_config.get("vocabulary_columns", [])) - for column_name, column in table.columns.items(): - is_vocab = column_name in vocab_columns - info = _get_info_for_column_type(type(column.type)) - best_generic_generator = None - if not column.foreign_keys and not column.primary_key and info is not None: - if info.numeric: - # Find summary information; mean, standard deviation and buckets 1/2 standard deviation width around mean. 
- best_generic_generator = await _get_generic_numeric_generator( - db_conn, - column_name, - table_name, - ) - if info.choice and is_vocab: # If it's not a vocabulary column then it's less useful to work out the choice distribution - # Find information on how many of each example there is - gg = await _get_generic_choice_generator( - db_conn, - column_name, - table_name, - ) - if best_generic_generator is None or ( - gg is not None and gg["fit"] < best_generic_generator["fit"] - ): - best_generic_generator = gg - if info.summary_query is not None: - # Run specified query - results = await db_conn.execute_raw_query(text(info.summary_query.format( - column=column_name, table=table_name - ))) - kw = get_result_mappings(info, results) - if kw is not None: - best_generic_generator = { "name": info.generator, "kwargs": kw } - if best_generic_generator is not None: - generic[table_name]["column_generators"][str(column_name)] = best_generic_generator - if generic: - src_stats["_sqlsynthgen_generic"] = generic return src_stats - - -async def _get_generic_choice_generator(db_conn, column_name, table_name): - results = await db_conn.execute_raw_query(text( - "SELECT {column} AS v, COUNT({column}) AS f FROM {table} GROUP BY v ORDER BY f DESC".format( - column=column_name, table=table_name - ) - )) - values = [] # The values found - counts = [] # The number or each value - total = 0 # total number of non-NULL results - for result in results: - c = result.f - if c != 0: - total += c - counts.append(c) - v = result.v - if type(v) is decimal.Decimal: - v = float(v) - values.append(v) - if not counts: - return None - total2 = total * total - # Which distribution fits best? - zipf = zipf_distribution(total, len(counts)) - zipf_fit = fit_error(zipf, counts) / total2 - unif = uniform_distribution(total, len(counts)) - unif_fit = fit_error(unif, counts) / total2 - if zipf_fit < unif_fit: - return { - "name": "dist_gen.zipf_choice", - "fit": zipf_fit, - "count": total, - "kwargs": { - "a": values, - "n": f"{len(counts)}", - } - } - return { - "name": "dist_gen.choice", - "fit": unif_fit, - "count": total, - "kwargs": { - "a": values, - } - } - - -async def _get_generic_numeric_generator(db_conn, column_name, table_name): - # Find summary information; mean, standard deviation and buckets 1/2 standard deviation width around mean. 
- results = await db_conn.execute_raw_query(text( - "SELECT AVG({column}) AS mean, STDDEV({column}) AS sd, COUNT({column}) AS count FROM {table}".format( - column=column_name, table=table_name - ) - )) - result = results.first() - count = result.count - generator = None - if result.sd is not None and not math.isnan(result.sd) and 0 < result.sd: - raw_buckets = await db_conn.execute_raw_query(text( - "SELECT COUNT({column}) AS f, FLOOR(({column} - {x})/{w}) AS b FROM {table} GROUP BY b".format( - column=column_name, table=table_name, x=result.mean - 2 * result.sd, w = result.sd / 2 - ) - )) - buckets = [0] * 10 - for rb in raw_buckets: - if rb.b is not None: - bucket = min(9, max(0, int(rb.b) + 1)) - buckets[bucket] += rb.f / count - best_fit = None - best_fit_distribution = None - best_fit_info = None - for dist_name, dist_info in _CDF_BUCKETS.items(): - fit = fit_error(dist_info["buckets"], buckets) - if best_fit is None or fit < best_fit: - best_fit = fit - best_fit_distribution = dist_name - best_fit_info = dist_info - generator = { - "name": best_fit_distribution, - "fit": best_fit, - "count": count, - "kwargs": best_fit_info["kwarg_fn"](float(result.mean), float(result.sd)), - } - return generator From b202495ea05543a72b0db978cdcb3638c8e10702 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 15 Apr 2025 10:28:09 +0100 Subject: [PATCH 51/85] Fix verbosity to apply to root config --- sqlsynthgen/utils.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index fe90e529..0969fc27 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -210,8 +210,6 @@ def warning_or_higher(record: logging.LogRecord) -> bool: def conf_logger(verbose: bool) -> None: """Configure the logger.""" # Note that this function modifies the global `logger`. - level = logging.DEBUG if verbose else logging.INFO - logger.setLevel(level) log_format = "%(message)s" # info will always be printed to stdout @@ -225,8 +223,11 @@ def conf_logger(verbose: bool) -> None: stderr_handler.setFormatter(logging.Formatter(log_format)) stderr_handler.addFilter(warning_or_higher) - logger.addHandler(stdout_handler) - logger.addHandler(stderr_handler) + logging.basicConfig( + level=logging.DEBUG if verbose else logging.INFO, + format=log_format, + handlers=[stdout_handler, stderr_handler], + ) def get_flag(maybe_dict, key): From 26e076d57507e5875332841ef459cdac913d67ad Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 15 Apr 2025 15:27:54 +0100 Subject: [PATCH 52/85] #9 speed up tests using Postgres One setup/teardown per class, not per test. 
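The factory caches the initialised data directory once per test class, so each test's setUp() only starts a server from the cached copy instead of re-running initdb. A rough sketch of the pattern (mirroring the tests/utils.py change below):

    # Created once in setUpClass; cache_initialized_db reuses the initdb result.
    Postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True)

    postgresql = Postgresql()   # per-test: start a server from the cached copy
    # ... run the test against postgresql.url() ...
    postgresql.stop()
    Postgresql.clear_cache()    # per-class: drop the cache in tearDownClass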
--- tests/utils.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/tests/utils.py b/tests/utils.py index fc67b30f..71c60e6a 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -63,10 +63,19 @@ class RequiresDBTestCase(SSGTestCase): examples_dir = "tests/examples" dump_file_path = None database_name = None + Postgresql = None + + @classmethod + def setUpClass(cls): + cls.Postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True) + + @classmethod + def tearDownClass(cls): + cls.Postgresql.clear_cache() def setUp(self) -> None: super().setUp() - self.postgresql = testing.postgresql.Postgresql() + self.postgresql = self.Postgresql() if self.dump_file_path is not None: self.run_psql(Path(self.examples_dir) / Path(self.dump_file_path)) self.engine = create_db_engine( From 88f9dec78beb7297862754865cc08bc8172f06f4 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 15 Apr 2025 19:15:39 +0100 Subject: [PATCH 53/85] #11 configure-generators keeps column order stable --- sqlsynthgen/interactive.py | 55 +++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 28 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 29b0257e..913f5cf1 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -355,12 +355,12 @@ class GeneratorCmd(DbCmd): prompt = "(generatorconf) " file = None - def make_table_entry(self, name: str) -> TableEntry: + def make_table_entry(self, table_name: str) -> TableEntry: tables = self.config.get("tables", {}) - table: str = tables.get(name, {}) - metadata_table = self.metadata.tables[name] - columns = {str(col_name) for col_name in metadata_table.columns.keys()} - generator_infos: list[GeneratorInfo] = [] + table: str = tables.get(table_name, {}) + metadata_table = self.metadata.tables[table_name] + columns = frozenset(metadata_table.columns.keys()) + col2gen: dict[str, Generator] = {} multiple_columns_assigned: dict[str, list[str]] = {} for rg in table.get("row_generators", []): gen_name = rg.get("name", None) @@ -368,46 +368,45 @@ def make_table_entry(self, name: str) -> TableEntry: ca = rg.get("columns_assigned", []) single_ca = None if isinstance(ca, str): - if ca in columns: - columns.remove(ca) - single_ca = ca - else: + if ca not in columns: logger.warning( "table '%s' has '%s' assigned to column '%s' which is not in this table", - name, gen_name, ca, + table_name, gen_name, ca, + ) + elif ca in col2gen: + logger.warning( + "table '%s' has column '%s' assigned to multiple times", + table_name, ca, ) + else: + single_ca = ca else: - columns.difference_update(ca) if len(ca) == 1: single_ca = str(ca[0]) - if single_ca is not None: - gen = PredefinedGenerator(table, rg, self.config) - generator_infos.append(GeneratorInfo( - column=single_ca, - is_primary_key=metadata_table.columns[single_ca].primary_key, - old_gen=gen, - new_gen=gen, - )) - else: - multiple_columns_assigned[gen_name] = ca - for col in columns: + if single_ca is not None: + col2gen[single_ca] = PredefinedGenerator(table, rg, self.config) + else: + multiple_columns_assigned[gen_name] = ca + generator_infos: list[GeneratorInfo] = [] + for name, col in metadata_table.columns.items(): + gen = col2gen.get(name, None) generator_infos.append(GeneratorInfo( - column=col, - is_primary_key=metadata_table.columns[col].primary_key, - old_gen=None, - new_gen=None, + column=name, + is_primary_key=col.primary_key, + old_gen=gen, + new_gen=gen, )) if multiple_columns_assigned: self.print( "The following mulit-column 
generators for table {0} are defined in the configuration file and cannot be configured with this command", - name, + table_name, ) for (gen_name, cols) in multiple_columns_assigned.items(): self.print(" {0}: {1}", gen_name, cols) if len(generator_infos) == 0: return None return GeneratorCmdTableEntry( - name=name, + name=table_name, generators=generator_infos ) From a3150786de8ebc5fcdd71507ccb31cf1d0944e78 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 16 Apr 2025 17:21:26 +0100 Subject: [PATCH 54/85] Initial tests for configure-generators --- tests/examples/instrument.sql | 65 +++++++++++++++++++ tests/test_interactive.py | 113 +++++++++++++++++++++++++++++++--- 2 files changed, 170 insertions(+), 8 deletions(-) create mode 100644 tests/examples/instrument.sql diff --git a/tests/examples/instrument.sql b/tests/examples/instrument.sql new file mode 100644 index 00000000..c61598a1 --- /dev/null +++ b/tests/examples/instrument.sql @@ -0,0 +1,65 @@ +-- DROP DATABASE IF EXISTS instrument WITH (FORCE); +CREATE DATABASE instrument WITH TEMPLATE template0 ENCODING = 'UTF8' LOCALE = 'en_US.utf8'; +ALTER DATABASE instrument OWNER TO postgres; + +\connect instrument + +CREATE TABLE public.manufacturer ( + id INTEGER NOT NULL, + name TEXT NOT NULL, + founded TIMESTAMP WITH TIME ZONE NOT NULL +); + +ALTER TABLE ONLY public.manufacturer ADD CONSTRAINT manufacturer_pkey PRIMARY KEY (id); + +ALTER TABLE public.manufacturer OWNER TO postgres; + +INSERT INTO public.manufacturer VALUES (1, 'Blender', 'January 8 04:05:06 1951 PST'); +INSERT INTO public.manufacturer VALUES (2, 'Gibbs', 'March 4 07:08:09 1959 PST'); + +CREATE TABLE public.model ( + id INTEGER NOT NULL, + name TEXT NOT NULL, + manufacturer_id INTEGER NOT NULL, + introduced TIMESTAMP WITH TIME ZONE NOT NULL +); + +ALTER TABLE ONLY public.model ADD CONSTRAINT model_pkey PRIMARY KEY (id); +ALTER TABLE ONLY public.model + ADD CONSTRAINT concept_manufacturer_id_fkey FOREIGN KEY (manufacturer_id) REFERENCES public.manufacturer(id); + +ALTER TABLE public.model OWNER TO postgres; + +INSERT INTO public.model VALUES (1, 'S-Type', 1, 'April 20 04:05:06 1952 PST'); +INSERT INTO public.model VALUES (2, 'Pulse', 1, 'December 2 02:15:06 1953 PST'); +INSERT INTO public.model VALUES (3, 'Paul Leslie', 2, 'February 20 04:05:06 1960 PST'); + +CREATE TABLE public.string ( + id INTEGER NOT NULL, + model_id INTEGER NOT NULL, + position INTEGER NOT NULL, + frequency FLOAT NOT NULL +); + +ALTER TABLE ONLY public.string ADD CONSTRAINT string_pkey PRIMARY KEY (id); +ALTER TABLE ONLY public.string + ADD CONSTRAINT concept_model_id_fkey FOREIGN KEY (model_id) REFERENCES public.model(id); + +ALTER TABLE public.string OWNER TO postgres; + +INSERT INTO public.string VALUES (1, 1, 1, 329.6); +INSERT INTO public.string VALUES (2, 1, 2, 246.94); +INSERT INTO public.string VALUES (3, 1, 3, 196); +INSERT INTO public.string VALUES (4, 1, 4, 146.83); +INSERT INTO public.string VALUES (5, 1, 5, 110); +INSERT INTO public.string VALUES (6, 1, 6, 82.4); +INSERT INTO public.string VALUES (7, 2, 1, 98); +INSERT INTO public.string VALUES (8, 2, 2, 73.42); +INSERT INTO public.string VALUES (9, 2, 3, 55); +INSERT INTO public.string VALUES (10, 2, 4, 30.87); +INSERT INTO public.string VALUES (11, 3, 1, 329.6); +INSERT INTO public.string VALUES (12, 3, 2, 246.94); +INSERT INTO public.string VALUES (13, 3, 3, 196); +INSERT INTO public.string VALUES (14, 3, 4, 146.83); +INSERT INTO public.string VALUES (15, 3, 5, 110); +INSERT INTO public.string VALUES (16, 3, 6, 82.4); diff --git 
a/tests/test_interactive.py b/tests/test_interactive.py index b31af81b..3cee2b21 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -2,19 +2,19 @@ from sqlalchemy import MetaData, select from sqlalchemy.orm import declarative_base -from sqlsynthgen.interactive import TableCmd +from sqlsynthgen.interactive import DbCmd, TableCmd, GeneratorCmd from tests.utils import RequiresDBTestCase -class TestTableCmd(TableCmd): +class TestDbCmdMixin(DbCmd): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.reset() def reset(self): - self.messages = [] - self.headings = [] - self.rows = [] - self.column_items = [] + self.messages: list[tuple[str, list, dict[str, any]]] = [] + self.headings: list[str] = [] + self.rows: list[list[str]] = [] + self.column_items: list[str] = [] self.columns: dict[str, list[str]] = {} def print(self, text: str, *args, **kwargs): self.messages.append((text, args, kwargs)) @@ -23,12 +23,16 @@ def print_table(self, headings: list[str], rows: list[list[str]]): self.rows = rows def print_table_by_columns(self, columns: dict[str, list[str]]): self.columns = columns - def columnize(self, items): + def columnize(self, items: list[str]): self.column_items.append(items) - def ask_save(self): + def ask_save(self) -> str: return "yes" +class TestTableCmd(TableCmd, TestDbCmdMixin): + """ TableCmd but mocked """ + + class ConfigureTablesTests(RequiresDBTestCase): """Testing configure-tables.""" dump_file_path = "src.dump" @@ -198,3 +202,96 @@ def test_list_tables(self): self.assertTrue(person_listed) self.assertTrue(unique_constraint_test_listed) self.assertTrue(no_pk_test_listed) + + +class TestGeneratorCmd(GeneratorCmd, TestDbCmdMixin): + """ TableCmd but mocked """ + def get_proposals(self) -> dict[str, tuple[int, str, str, list[str]]]: + """ + Returns a dict of generator name to a tuple of (index, fit_string, [list,of,samples])""" + return { + kw["name"]: (kw["index"], kw["fit"], kw["sample"].split(", ")) + for (s, _, kw) in self.messages + if s == self.PROPOSE_GENERATOR_SAMPLE_TEXT + } + + +class ConfigureTablesTests(RequiresDBTestCase): + """Testing configure-tables.""" + dump_file_path = "instrument.sql" + database_name = "instrument" + schema_name = "public" + + def test_set_generator_mimesis(self): + """ Test that we can set one generator to a mimesis generator. """ + metadata = MetaData() + metadata.reflect(self.engine) + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, {}) as gc: + TABLE = "model" + COLUMN = "name" + GENERATOR = "person.first_name" + gc.do_next(f"{TABLE}.{COLUMN}") + gc.do_propose("") + proposals = gc.get_proposals() + gc.do_set(str(proposals[f"generic.{GENERATOR}"][0])) + gc.do_quit("") + self.assertEqual(len(gc.config["tables"][TABLE]["row_generators"]), 1) + self.assertDictEqual( + gc.config["tables"][TABLE]["row_generators"][0], + {"name": f"generic.{GENERATOR}", "columns_assigned": [COLUMN]}, + ) + + def test_set_generator_distribution(self): + """ Test that we can set one generator to gaussian. 
""" + metadata = MetaData() + metadata.reflect(self.engine) + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, {}) as gc: + TABLE = "string" + COLUMN = "frequency" + GENERATOR = "dist_gen.normal" + gc.do_next(f"{TABLE}.{COLUMN}") + gc.do_propose("") + proposals = gc.get_proposals() + gc.do_set(str(proposals[GENERATOR][0])) + gc.do_quit("") + row_gens = gc.config["tables"][TABLE]["row_generators"] + self.assertEqual(len(row_gens), 1) + row_gen = row_gens[0] + self.assertEqual(row_gen["name"], GENERATOR) + self.assertListEqual(row_gen["columns_assigned"], [COLUMN]) + self.assertDictEqual(row_gen["kwargs"], { + "mean": f'SRC_STATS["auto__{TABLE}"]["mean__{COLUMN}"]', + "sd": f'SRC_STATS["auto__{TABLE}"]["stddev__{COLUMN}"]', + }) + self.assertEqual(len(gc.config["src-stats"]), 1) + self.assertDictEqual(gc.config["src-stats"][0], { + "name": f"auto__{TABLE}", + "query": f"SELECT AVG({COLUMN}) AS mean__{COLUMN}, STDDEV({COLUMN}) AS stddev__{COLUMN} FROM {TABLE}", + }) + + def test_set_generator_choice(self): + """ Test that we can set one generator to uniform choice. """ + metadata = MetaData() + metadata.reflect(self.engine) + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, {}) as gc: + TABLE = "string" + COLUMN = "frequency" + GENERATOR = "dist_gen.choice" + gc.do_next(f"{TABLE}.{COLUMN}") + gc.do_propose("") + proposals = gc.get_proposals() + gc.do_set(str(proposals[GENERATOR][0])) + gc.do_quit("") + row_gens = gc.config["tables"][TABLE]["row_generators"] + self.assertEqual(len(row_gens), 1) + row_gen = row_gens[0] + self.assertEqual(row_gen["name"], GENERATOR) + self.assertListEqual(row_gen["columns_assigned"], [COLUMN]) + self.assertDictEqual(row_gen["kwargs"], { + "a": f'SRC_STATS["auto__{TABLE}__{COLUMN}"]["value"]', + }) + self.assertEqual(len(gc.config["src-stats"]), 1) + self.assertDictEqual(gc.config["src-stats"][0], { + "name": f"auto__{TABLE}__{COLUMN}", + "query": f"SELECT {COLUMN} AS value FROM {TABLE} GROUP BY value ORDER BY COUNT({COLUMN}) DESC", + }) From b5f81309f383ebb9f3eab3243b9d09fdf0a513b1 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 16 Apr 2025 17:47:14 +0100 Subject: [PATCH 55/85] configure-generators keeps old generators fixed stories --- poetry.lock | 1306 ++++++++++++++++++------------------ pyproject.toml | 4 +- sqlsynthgen/create.py | 139 ++-- sqlsynthgen/generators.py | 35 +- sqlsynthgen/interactive.py | 51 +- tests/test_create.py | 7 +- tests/test_interactive.py | 52 ++ 7 files changed, 885 insertions(+), 709 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2ec43f02..632700a1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -34,13 +34,13 @@ files = [ [[package]] name = "astroid" -version = "3.3.6" +version = "3.3.9" description = "An abstract syntax tree for Python with inference support." 
optional = false python-versions = ">=3.9.0" files = [ - {file = "astroid-3.3.6-py3-none-any.whl", hash = "sha256:db676dc4f3ae6bfe31cda227dc60e03438378d7a896aec57422c95634e8d722f"}, - {file = "astroid-3.3.6.tar.gz", hash = "sha256:6aaea045f938c735ead292204afdb977a36e989522b7833ef6fea94de743f442"}, + {file = "astroid-3.3.9-py3-none-any.whl", hash = "sha256:d05bfd0acba96a7bd43e222828b7d9bc1e138aaeb0649707908d3702a9831248"}, + {file = "astroid-3.3.9.tar.gz", hash = "sha256:622cc8e3048684aa42c820d9d218978021c3c3d174fb03a9f0d615921744f550"}, ] [package.dependencies] @@ -125,36 +125,36 @@ test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0 [[package]] name = "attrs" -version = "24.2.0" +version = "25.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" -version = "2.16.0" +version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "black" @@ -204,13 +204,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.8.30" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] @@ -226,127 +226,114 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = 
"charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = 
"charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file 
= "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = 
"sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -365,18 +352,39 @@ files = [ [[package]] name = "dataclasses-json" -version = "0.5.14" -description = "Easily serialize dataclasses to and from JSON." 
+version = "0.5.9" +description = "Easily serialize dataclasses to and from JSON" optional = false -python-versions = ">=3.7,<3.13" +python-versions = ">=3.6" files = [ - {file = "dataclasses_json-0.5.14-py3-none-any.whl", hash = "sha256:5ec6fed642adb1dbdb4182badb01e0861badfd8fda82e3b67f44b2d1e9d10d21"}, - {file = "dataclasses_json-0.5.14.tar.gz", hash = "sha256:d82896a94c992ffaf689cd1fafc180164e2abdd415b8f94a7f78586af5886236"}, + {file = "dataclasses-json-0.5.9.tar.gz", hash = "sha256:e9ac87b73edc0141aafbce02b44e93553c3123ad574958f0fe52a534b6707e8e"}, + {file = "dataclasses_json-0.5.9-py3-none-any.whl", hash = "sha256:1280542631df1c375b7bc92e5b86d39e06c44760d7e3571a537b3b8acabf2f0c"}, ] [package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" +marshmallow = ">=3.3.0,<4.0.0" +marshmallow-enum = ">=1.5.1,<2.0.0" +typing-inspect = ">=0.4.0" + +[package.extras] +dev = ["flake8", "hypothesis", "ipython", "mypy (>=0.710)", "portray", "pytest (>=7.2.0)", "setuptools", "simplejson", "twine", "types-dataclasses", "wheel"] + +[[package]] +name = "deprecated" +version = "1.2.18" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, + {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] [[package]] name = "dill" @@ -417,35 +425,35 @@ files = [ [[package]] name = "filelock" -version = "3.16.1" +version = "3.18.0" description = "A platform independent file lock." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, - {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "graphviz" -version = "0.17" +version = "0.20.3" description = "Simple Python interface for Graphviz" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "graphviz-0.17-py3-none-any.whl", hash = "sha256:5dadec94046d82adaae6019311a30e0487536d9d5a60d85451f0ba32f9fc6559"}, - {file = "graphviz-0.17.zip", hash = "sha256:ef6e2c5deb9cdcc0c7eece1d89625fd07b0f2208ea2bcb483520907ddf8b4e12"}, + {file = "graphviz-0.20.3-py3-none-any.whl", hash = "sha256:81f848f2904515d8cd359cc611faba817598d2feaac4027b266aa3eda7b3dde5"}, + {file = "graphviz-0.20.3.zip", hash = "sha256:09d6bc81e6a9fa392e7ba52135a9d49f1ed62526f96499325930e87ca1b5925d"}, ] [package.extras] dev = ["flake8", "pep8-naming", "tox (>=3)", "twine", "wheel"] -docs = ["sphinx (>=1.8)", "sphinx-autodoc-typehints", "sphinx-rtd-theme"] -test = ["mock (>=3)", "pytest (>=5.2)", "pytest-cov", "pytest-mock (>=2)"] +docs = ["sphinx (>=5,<7)", "sphinx-autodoc-typehints", "sphinx-rtd-theme"] +test = ["coverage", "pytest (>=7,<8.1)", "pytest-cov", "pytest-mock (>=3)"] [[package]] name = "greenlet" @@ -545,13 +553,13 @@ files = [ [[package]] name = "identify" -version = "2.6.3" +version = "2.6.9" description = "File identification library for Python" optional = false python-versions = ">=3.9" files = [ - {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, - {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, + {file = "identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150"}, + {file = "identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf"}, ] [package.extras] @@ -582,29 +590,6 @@ files = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] -[[package]] -name = "importlib-metadata" -version = "8.5.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - 
{file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - [[package]] name = "isort" version = "5.13.2" @@ -621,13 +606,13 @@ colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -696,13 +681,13 @@ referencing = ">=0.31.0" [[package]] name = "markdown2" -version = "2.5.1" +version = "2.5.3" description = "A fast and complete Python implementation of Markdown" optional = false -python-versions = "<4,>=3.8" +python-versions = "<4,>=3.9" files = [ - {file = "markdown2-2.5.1-py2.py3-none-any.whl", hash = "sha256:190ae60a4bd0425c60c863bede18a9f3d45b1cbf3fbc9f40b4fac336ff2c520b"}, - {file = "markdown2-2.5.1.tar.gz", hash = "sha256:12fc04ea5a87f7bb4b65acf5bf3af1183b20838cc7d543b74c92ec7eea4bbc74"}, + {file = "markdown2-2.5.3-py3-none-any.whl", hash = "sha256:a8ebb7e84b8519c37bf7382b3db600f1798a22c245bfd754a1f87ca8d7ea63b3"}, + {file = "markdown2-2.5.3.tar.gz", hash = "sha256:4d502953a4633408b0ab3ec503c5d6984d1b14307e32b325ec7d16ea57524895"}, ] [package.extras] @@ -782,13 +767,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.23.1" +version = "3.26.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.9" files = [ - {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, - {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"}, + {file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"}, + {file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"}, ] [package.dependencies] @@ -796,9 +781,23 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"] tests = ["pytest", "simplejson"] +[[package]] +name = "marshmallow-enum" +version = "1.5.1" +description = "Enum field for Marshmallow" +optional = false +python-versions = "*" +files = [ + {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, + {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, +] + +[package.dependencies] +marshmallow = ">=2.0.0" + [[package]] name = "mccabe" version = "0.7.0" @@ -812,60 +811,64 @@ files = [ [[package]] name = "mimesis" -version = "6.1.1" +version = "18.0.0" description = "Mimesis: Fake Data Generator." optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.10" files = [ - {file = "mimesis-6.1.1-py3-none-any.whl", hash = "sha256:eabe41d7afa23b01dffb51ebd9e10837df6417fef02fa9841989ca886e479790"}, - {file = "mimesis-6.1.1.tar.gz", hash = "sha256:044ac378c61db0e06832ff722548fd6e604881d36bc938002e0bd5b85eeb6a98"}, + {file = "mimesis-18.0.0-py3-none-any.whl", hash = "sha256:a51854a5ce63ebf2bd6a98e8841412e04cede38593be7e16d1d712848e6273df"}, + {file = "mimesis-18.0.0.tar.gz", hash = "sha256:7d7c76ecd680ae48afe8dc4413ef1ef1ee7ef20e16f9f9cb42892add642fc1b2"}, ] +[package.extras] +factory = ["factory-boy (>=3.3.0,<4.0.0)"] +pytest = ["pytest (>=7.2,<8.0)"] + [[package]] name = "mypy" -version = "1.13.0" +version = "1.15.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = 
"mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = 
"mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" +mypy_extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -898,151 +901,92 @@ files = [ [[package]] name = "numpy" -version = "2.0.2" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, - {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, - {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, - 
{file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, - {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, - {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, - {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, - {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, - {file = 
"numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, - {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, - {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, - {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, -] - -[[package]] -name = "numpy" -version = "2.2.0" +version = "2.2.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" files = [ - {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, - {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, - {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, - {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, - {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, - {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, - {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, - {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, - {file = 
"numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, - {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, - {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, - {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, - {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, - {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, - {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, - {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, - {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, - {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, - {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, - {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, - {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, - {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, - {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, - {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, - {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, - {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, - {file = 
"numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, - {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, - {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, - {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, - {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, - {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, - {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, - {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, - {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, - {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, - {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, - {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, - {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, + {file = "numpy-2.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8146f3550d627252269ac42ae660281d673eb6f8b32f113538e0cc2a9aed42b9"}, + {file = "numpy-2.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e642d86b8f956098b564a45e6f6ce68a22c2c97a04f5acd3f221f57b8cb850ae"}, + {file = "numpy-2.2.4-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:a84eda42bd12edc36eb5b53bbcc9b406820d3353f1994b6cfe453a33ff101775"}, + {file = "numpy-2.2.4-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:4ba5054787e89c59c593a4169830ab362ac2bee8a969249dc56e5d7d20ff8df9"}, + {file = "numpy-2.2.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7716e4a9b7af82c06a2543c53ca476fa0b57e4d760481273e09da04b74ee6ee2"}, + {file = 
"numpy-2.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf8c1d66f432ce577d0197dceaac2ac00c0759f573f28516246351c58a85020"}, + {file = "numpy-2.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:218f061d2faa73621fa23d6359442b0fc658d5b9a70801373625d958259eaca3"}, + {file = "numpy-2.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:df2f57871a96bbc1b69733cd4c51dc33bea66146b8c63cacbfed73eec0883017"}, + {file = "numpy-2.2.4-cp310-cp310-win32.whl", hash = "sha256:a0258ad1f44f138b791327961caedffbf9612bfa504ab9597157806faa95194a"}, + {file = "numpy-2.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:0d54974f9cf14acf49c60f0f7f4084b6579d24d439453d5fc5805d46a165b542"}, + {file = "numpy-2.2.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9e0a277bb2eb5d8a7407e14688b85fd8ad628ee4e0c7930415687b6564207a4"}, + {file = "numpy-2.2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eeea959168ea555e556b8188da5fa7831e21d91ce031e95ce23747b7609f8a4"}, + {file = "numpy-2.2.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bd3ad3b0a40e713fc68f99ecfd07124195333f1e689387c180813f0e94309d6f"}, + {file = "numpy-2.2.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cf28633d64294969c019c6df4ff37f5698e8326db68cc2b66576a51fad634880"}, + {file = "numpy-2.2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fa8fa7697ad1646b5c93de1719965844e004fcad23c91228aca1cf0800044a1"}, + {file = "numpy-2.2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4162988a360a29af158aeb4a2f4f09ffed6a969c9776f8f3bdee9b06a8ab7e5"}, + {file = "numpy-2.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:892c10d6a73e0f14935c31229e03325a7b3093fafd6ce0af704be7f894d95687"}, + {file = "numpy-2.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db1f1c22173ac1c58db249ae48aa7ead29f534b9a948bc56828337aa84a32ed6"}, + {file = "numpy-2.2.4-cp311-cp311-win32.whl", hash = "sha256:ea2bb7e2ae9e37d96835b3576a4fa4b3a97592fbea8ef7c3587078b0068b8f09"}, + {file = "numpy-2.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:f7de08cbe5551911886d1ab60de58448c6df0f67d9feb7d1fb21e9875ef95e91"}, + {file = "numpy-2.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a7b9084668aa0f64e64bd00d27ba5146ef1c3a8835f3bd912e7a9e01326804c4"}, + {file = "numpy-2.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbe512c511956b893d2dacd007d955a3f03d555ae05cfa3ff1c1ff6df8851854"}, + {file = "numpy-2.2.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bb649f8b207ab07caebba230d851b579a3c8711a851d29efe15008e31bb4de24"}, + {file = "numpy-2.2.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:f34dc300df798742b3d06515aa2a0aee20941c13579d7a2f2e10af01ae4901ee"}, + {file = "numpy-2.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3f7ac96b16955634e223b579a3e5798df59007ca43e8d451a0e6a50f6bfdfba"}, + {file = "numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f92084defa704deadd4e0a5ab1dc52d8ac9e8a8ef617f3fbb853e79b0ea3592"}, + {file = "numpy-2.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4e84a6283b36632e2a5b56e121961f6542ab886bc9e12f8f9818b3c266bfbb"}, + {file = "numpy-2.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:11c43995255eb4127115956495f43e9343736edb7fcdb0d973defd9de14cd84f"}, + {file = "numpy-2.2.4-cp312-cp312-win32.whl", hash = "sha256:65ef3468b53269eb5fdb3a5c09508c032b793da03251d5f8722b1194f1790c00"}, + {file = "numpy-2.2.4-cp312-cp312-win_amd64.whl", 
hash = "sha256:2aad3c17ed2ff455b8eaafe06bcdae0062a1db77cb99f4b9cbb5f4ecb13c5146"}, + {file = "numpy-2.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cf4e5c6a278d620dee9ddeb487dc6a860f9b199eadeecc567f777daace1e9e7"}, + {file = "numpy-2.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1974afec0b479e50438fc3648974268f972e2d908ddb6d7fb634598cdb8260a0"}, + {file = "numpy-2.2.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:79bd5f0a02aa16808fcbc79a9a376a147cc1045f7dfe44c6e7d53fa8b8a79392"}, + {file = "numpy-2.2.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:3387dd7232804b341165cedcb90694565a6015433ee076c6754775e85d86f1fc"}, + {file = "numpy-2.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f527d8fdb0286fd2fd97a2a96c6be17ba4232da346931d967a0630050dfd298"}, + {file = "numpy-2.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce43e386c16898b91e162e5baaad90c4b06f9dcbe36282490032cec98dc8ae7"}, + {file = "numpy-2.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31504f970f563d99f71a3512d0c01a645b692b12a63630d6aafa0939e52361e6"}, + {file = "numpy-2.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:81413336ef121a6ba746892fad881a83351ee3e1e4011f52e97fba79233611fd"}, + {file = "numpy-2.2.4-cp313-cp313-win32.whl", hash = "sha256:f486038e44caa08dbd97275a9a35a283a8f1d2f0ee60ac260a1790e76660833c"}, + {file = "numpy-2.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:207a2b8441cc8b6a2a78c9ddc64d00d20c303d79fba08c577752f080c4007ee3"}, + {file = "numpy-2.2.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8120575cb4882318c791f839a4fd66161a6fa46f3f0a5e613071aae35b5dd8f8"}, + {file = "numpy-2.2.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a761ba0fa886a7bb33c6c8f6f20213735cb19642c580a931c625ee377ee8bd39"}, + {file = "numpy-2.2.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ac0280f1ba4a4bfff363a99a6aceed4f8e123f8a9b234c89140f5e894e452ecd"}, + {file = "numpy-2.2.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:879cf3a9a2b53a4672a168c21375166171bc3932b7e21f622201811c43cdd3b0"}, + {file = "numpy-2.2.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05d4198c1bacc9124018109c5fba2f3201dbe7ab6e92ff100494f236209c960"}, + {file = "numpy-2.2.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f085ce2e813a50dfd0e01fbfc0c12bbe5d2063d99f8b29da30e544fb6483b8"}, + {file = "numpy-2.2.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:92bda934a791c01d6d9d8e038363c50918ef7c40601552a58ac84c9613a665bc"}, + {file = "numpy-2.2.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ee4d528022f4c5ff67332469e10efe06a267e32f4067dc76bb7e2cddf3cd25ff"}, + {file = "numpy-2.2.4-cp313-cp313t-win32.whl", hash = "sha256:05c076d531e9998e7e694c36e8b349969c56eadd2cdcd07242958489d79a7286"}, + {file = "numpy-2.2.4-cp313-cp313t-win_amd64.whl", hash = "sha256:188dcbca89834cc2e14eb2f106c96d6d46f200fe0200310fc29089657379c58d"}, + {file = "numpy-2.2.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7051ee569db5fbac144335e0f3b9c2337e0c8d5c9fee015f259a5bd70772b7e8"}, + {file = "numpy-2.2.4-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ab2939cd5bec30a7430cbdb2287b63151b77cf9624de0532d629c9a1c59b1d5c"}, + {file = "numpy-2.2.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f35b19894a9e08639fd60a1ec1978cb7f5f7f1eace62f38dd36be8aecdef4d"}, + {file = "numpy-2.2.4-pp310-pypy310_pp73-win_amd64.whl", hash 
= "sha256:b4adfbbc64014976d2f91084915ca4e626fbf2057fb81af209c1a6d776d23e3d"}, + {file = "numpy-2.2.4.tar.gz", hash = "sha256:9ba03692a45d3eef66559efe1d1096c4b9b75c0986b5dff5530c378fb8331d4f"}, ] [[package]] name = "opendp" -version = "0.10.0" +version = "0.12.1" description = "Python bindings for the OpenDP Library" optional = false python-versions = ">=3.9" files = [ - {file = "opendp-0.10.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:73b1d9e200d44df1e4f6ca84659080bbe00cb50c2cdd96bfae59dcb7393469d1"}, - {file = "opendp-0.10.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:d75d1bd67ec031e8427e40d465c71f4ae80fe0ed4e9619c8881d54b52dfa8969"}, - {file = "opendp-0.10.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffbebb654b40e4b9df725fb47088786b54786046c9f47244a5286a92895fe975"}, - {file = "opendp-0.10.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1907ade70eea733d65e418548ef326986fe7b8466394202872bb6f7c86e452be"}, - {file = "opendp-0.10.0-cp39-abi3-win32.whl", hash = "sha256:b27090536a512e0df266f92d3edd6ce959cc0c3a63d059ce39ec6e0a532eba80"}, - {file = "opendp-0.10.0-cp39-abi3-win_amd64.whl", hash = "sha256:cdf774428679c3191ce421b0729d513abb47bd2247da97484b9bf155b9bf6aa2"}, - {file = "opendp-0.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0090fd6543f52dffd5a6bc9e78695543f74c0b1dfd0584d172a577c4d40c3752"}, - {file = "opendp-0.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21c2a82c101a6996f6bd27c0e573bca8d5d9c7e5db85456c3f3e713400cc60f0"}, - {file = "opendp-0.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f35fefa8f68fd3d28208c56fdfe409f0454bcb1dddb4a77d511e215be0bc552"}, - {file = "opendp-0.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f4a36903e1be544f339a5f083dfccd195a278acf2bae044fb3d9884c3a4f340f"}, - {file = "opendp-0.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0de654a3b81ba8d0266ef3c7e827d477caf5fdaf83397fd859f334ff4d2be774"}, - {file = "opendp-0.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6ab7ed35b778ddfa6008323527ae8b2c05c400c3023f881dd1f7598ec3b9691"}, - {file = "opendp-0.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34769cd8e6b8dfc11a8411bae240b07c3333e344f14cef5548bbfe48fa7bd9c1"}, - {file = "opendp-0.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4a260206ad6204c3e6149448597fe775e9202a48cd3ead2f0a3a1962280412d3"}, - {file = "opendp-0.10.0-py3-none-manylinux_2_24_aarch64.whl", hash = "sha256:f5ed014e32885fbb6e7d2e3316eeb48ebe941ae00d0d0a8bd2a424f4d2d9993a"}, - {file = "opendp-0.10.0.tar.gz", hash = "sha256:80598450c9be450ae6950c7f5fedfd2482ee4e419f511db8ba7f76d7db6d66b1"}, + {file = "opendp-0.12.1-cp39-abi3-macosx_10_13_x86_64.whl", hash = "sha256:72edcd516e606a983ceaf828663655e46ed7d2a712e6335845413672ce10b89a"}, + {file = "opendp-0.12.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:6315380316fada9fd051ac0d0e46d323da1c9a509f0a808908a9c980be4f448a"}, + {file = "opendp-0.12.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c95c3ab7b8e61f94a76f08aafb9dc099d761845ad85da4f217c153fb612ab545"}, + {file = "opendp-0.12.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b66cc7bcacbacd5a911a536b0fb463b2084964fed38b1466ca266e54d717ba67"}, + {file = "opendp-0.12.1-cp39-abi3-win32.whl", hash = "sha256:b714a4776dfe8af01f6d480d56b5d7595b645da9379acdaf98cd29111df88725"}, + {file = "opendp-0.12.1-cp39-abi3-win_amd64.whl", hash = 
"sha256:4590b5297c3572456ecaab451e5560d4bd5fec645f509b49df52625220b2a286"}, + {file = "opendp-0.12.1-py3-none-manylinux_2_24_aarch64.whl", hash = "sha256:2258ef20a959f5b6acd97a2b611a26a68a69a34647fbf72d85d72420d41c9e4e"}, + {file = "opendp-0.12.1.tar.gz", hash = "sha256:5b17a83733c903958a49ef2fd72e9620169bc0f7bab7c03a20aba66bfdc3fa2e"}, ] +[package.dependencies] +deprecated = "*" + [package.extras] -numpy = ["numpy (>=1.17,<2.0)", "randomgen"] -polars = ["numpy (>=1.17,<2.0)", "polars (==0.20.16)", "pyarrow", "randomgen", "scikit-learn"] -scikit-learn = ["numpy (>=1.17,<2.0)", "randomgen", "scikit-learn"] +numpy = ["numpy", "randomgen (>=2.0.0)"] +polars = ["numpy", "polars (==1.12.0)", "pyarrow", "randomgen (>=2.0.0)", "scikit-learn"] +scikit-learn = ["numpy", "randomgen (>=2.0.0)", "scikit-learn"] [[package]] name = "packaging" @@ -1180,19 +1124,19 @@ scramp = ">=1.4.5" [[package]] name = "platformdirs" -version = "4.3.6" +version = "4.3.7" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, + {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, + {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] [[package]] name = "pockets" @@ -1228,13 +1172,13 @@ virtualenv = ">=20.10.0" [[package]] name = "prettytable" -version = "3.15.1" +version = "3.16.0" description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" optional = false python-versions = ">=3.9" files = [ - {file = "prettytable-3.15.1-py3-none-any.whl", hash = "sha256:1bb0da7437e904ec879d2998aded19abc722719aa3d384a7faa44dcbe4aeb2e9"}, - {file = "prettytable-3.15.1.tar.gz", hash = "sha256:f0edb38060cb9161b2417939bfd5cd9877da73388fb19d1e8bf7987e8558896e"}, + {file = "prettytable-3.16.0-py3-none-any.whl", hash = "sha256:b5eccfabb82222f5aa46b798ff02a8452cf530a352c31bddfa29be41242863aa"}, + {file = "prettytable-3.16.0.tar.gz", hash = "sha256:3c64b31719d961bf69c9a7e03d0c1e477320906a98da63952bc6698d6164ff57"}, ] [package.dependencies] @@ -1321,54 +1265,61 @@ files = [ [[package]] name = "pydantic" -version = "1.10.19" +version = "1.10.21" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a415b9e95fa602b10808113967f72b2da8722061265d6af69268c111c254832d"}, - {file = "pydantic-1.10.19-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:11965f421f7eb026439d4eb7464e9182fe6d69c3d4d416e464a4485d1ba61ab6"}, - {file = "pydantic-1.10.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bb81fcfc6d5bff62cd786cbd87480a11d23f16d5376ad2e057c02b3b44df96"}, - {file = "pydantic-1.10.19-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83ee8c9916689f8e6e7d90161e6663ac876be2efd32f61fdcfa3a15e87d4e413"}, - {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0399094464ae7f28482de22383e667625e38e1516d6b213176df1acdd0c477ea"}, - {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8b2cf5e26da84f2d2dee3f60a3f1782adedcee785567a19b68d0af7e1534bd1f"}, - {file = "pydantic-1.10.19-cp310-cp310-win_amd64.whl", hash = "sha256:1fc8cc264afaf47ae6a9bcbd36c018d0c6b89293835d7fb0e5e1a95898062d59"}, - {file = "pydantic-1.10.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7a8a1dd68bac29f08f0a3147de1885f4dccec35d4ea926e6e637fac03cdb4b3"}, - {file = "pydantic-1.10.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07d00ca5ef0de65dd274005433ce2bb623730271d495a7d190a91c19c5679d34"}, - {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad57004e5d73aee36f1e25e4e73a4bc853b473a1c30f652dc8d86b0a987ffce3"}, - {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dce355fe7ae53e3090f7f5fa242423c3a7b53260747aa398b4b3aaf8b25f41c3"}, - {file = "pydantic-1.10.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0d32227ea9a3bf537a2273fd2fdb6d64ab4d9b83acd9e4e09310a777baaabb98"}, - {file = "pydantic-1.10.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e351df83d1c9cffa53d4e779009a093be70f1d5c6bb7068584086f6a19042526"}, - {file = "pydantic-1.10.19-cp311-cp311-win_amd64.whl", hash = "sha256:d8d72553d2f3f57ce547de4fa7dc8e3859927784ab2c88343f1fc1360ff17a08"}, - {file = "pydantic-1.10.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d5b5b7c6bafaef90cbb7dafcb225b763edd71d9e22489647ee7df49d6d341890"}, - {file = "pydantic-1.10.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:570ad0aeaf98b5e33ff41af75aba2ef6604ee25ce0431ecd734a28e74a208555"}, - {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0890fbd7fec9e151c7512941243d830b2d6076d5df159a2030952d480ab80a4e"}, - {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec5c44e6e9eac5128a9bfd21610df3b8c6b17343285cc185105686888dc81206"}, - {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6eb56074b11a696e0b66c7181da682e88c00e5cebe6570af8013fcae5e63e186"}, - {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d7d48fbc5289efd23982a0d68e973a1f37d49064ccd36d86de4543aff21e086"}, - {file = "pydantic-1.10.19-cp312-cp312-win_amd64.whl", hash = "sha256:fd34012691fbd4e67bdf4accb1f0682342101015b78327eaae3543583fcd451e"}, - {file = "pydantic-1.10.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a5d5b877c7d3d9e17399571a8ab042081d22fe6904416a8b20f8af5909e6c8f"}, - {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c46f58ef2df958ed2ea7437a8be0897d5efe9ee480818405338c7da88186fb3"}, - {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6d8a38a44bb6a15810084316ed69c854a7c06e0c99c5429f1d664ad52cec353c"}, - {file = "pydantic-1.10.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a82746c6d6e91ca17e75f7f333ed41d70fce93af520a8437821dec3ee52dfb10"}, - {file = "pydantic-1.10.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:566bebdbe6bc0ac593fa0f67d62febbad9f8be5433f686dc56401ba4aab034e3"}, - {file = "pydantic-1.10.19-cp37-cp37m-win_amd64.whl", hash = "sha256:22a1794e01591884741be56c6fba157c4e99dcc9244beb5a87bd4aa54b84ea8b"}, - {file = "pydantic-1.10.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:076c49e24b73d346c45f9282d00dbfc16eef7ae27c970583d499f11110d9e5b0"}, - {file = "pydantic-1.10.19-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d4320510682d5a6c88766b2a286d03b87bd3562bf8d78c73d63bab04b21e7b4"}, - {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e66aa0fa7f8aa9d0a620361834f6eb60d01d3e9cea23ca1a92cda99e6f61dac"}, - {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d216f8d0484d88ab72ab45d699ac669fe031275e3fa6553e3804e69485449fa0"}, - {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f28a81978e936136c44e6a70c65bde7548d87f3807260f73aeffbf76fb94c2f"}, - {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d3449633c207ec3d2d672eedb3edbe753e29bd4e22d2e42a37a2c1406564c20f"}, - {file = "pydantic-1.10.19-cp38-cp38-win_amd64.whl", hash = "sha256:7ea24e8614f541d69ea72759ff635df0e612b7dc9d264d43f51364df310081a3"}, - {file = "pydantic-1.10.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:573254d844f3e64093f72fcd922561d9c5696821ff0900a0db989d8c06ab0c25"}, - {file = "pydantic-1.10.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff09600cebe957ecbb4a27496fe34c1d449e7957ed20a202d5029a71a8af2e35"}, - {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4739c206bfb6bb2bdc78dcd40bfcebb2361add4ceac6d170e741bb914e9eff0f"}, - {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bfb5b378b78229119d66ced6adac2e933c67a0aa1d0a7adffbe432f3ec14ce4"}, - {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7f31742c95e3f9443b8c6fa07c119623e61d76603be9c0d390bcf7e888acabcb"}, - {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6444368b651a14c2ce2fb22145e1496f7ab23cbdb978590d47c8d34a7bc0289"}, - {file = "pydantic-1.10.19-cp39-cp39-win_amd64.whl", hash = "sha256:945407f4d08cd12485757a281fca0e5b41408606228612f421aa4ea1b63a095d"}, - {file = "pydantic-1.10.19-py3-none-any.whl", hash = "sha256:2206a1752d9fac011e95ca83926a269fb0ef5536f7e053966d058316e24d929f"}, - {file = "pydantic-1.10.19.tar.gz", hash = "sha256:fea36c2065b7a1d28c6819cc2e93387b43dd5d3cf5a1e82d8132ee23f36d1f10"}, + {file = "pydantic-1.10.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:245e486e0fec53ec2366df9cf1cba36e0bbf066af7cd9c974bbbd9ba10e1e586"}, + {file = "pydantic-1.10.21-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c54f8d4c151c1de784c5b93dfbb872067e3414619e10e21e695f7bb84d1d1fd"}, + {file = "pydantic-1.10.21-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b64708009cfabd9c2211295144ff455ec7ceb4c4fb45a07a804309598f36187"}, + {file = "pydantic-1.10.21-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8a148410fa0e971ba333358d11a6dea7b48e063de127c2b09ece9d1c1137dde4"}, + {file = "pydantic-1.10.21-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:36ceadef055af06e7756eb4b871cdc9e5a27bdc06a45c820cd94b443de019bbf"}, + {file = "pydantic-1.10.21-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0501e1d12df6ab1211b8cad52d2f7b2cd81f8e8e776d39aa5e71e2998d0379f"}, + {file = "pydantic-1.10.21-cp310-cp310-win_amd64.whl", hash = "sha256:c261127c275d7bce50b26b26c7d8427dcb5c4803e840e913f8d9df3f99dca55f"}, + {file = "pydantic-1.10.21-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8b6350b68566bb6b164fb06a3772e878887f3c857c46c0c534788081cb48adf4"}, + {file = "pydantic-1.10.21-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:935b19fdcde236f4fbf691959fa5c3e2b6951fff132964e869e57c70f2ad1ba3"}, + {file = "pydantic-1.10.21-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b6a04efdcd25486b27f24c1648d5adc1633ad8b4506d0e96e5367f075ed2e0b"}, + {file = "pydantic-1.10.21-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1ba253eb5af8d89864073e6ce8e6c8dec5f49920cff61f38f5c3383e38b1c9f"}, + {file = "pydantic-1.10.21-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:57f0101e6c97b411f287a0b7cf5ebc4e5d3b18254bf926f45a11615d29475793"}, + {file = "pydantic-1.10.21-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90e85834f0370d737c77a386ce505c21b06bfe7086c1c568b70e15a568d9670d"}, + {file = "pydantic-1.10.21-cp311-cp311-win_amd64.whl", hash = "sha256:6a497bc66b3374b7d105763d1d3de76d949287bf28969bff4656206ab8a53aa9"}, + {file = "pydantic-1.10.21-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2ed4a5f13cf160d64aa331ab9017af81f3481cd9fd0e49f1d707b57fe1b9f3ae"}, + {file = "pydantic-1.10.21-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b7693bb6ed3fbe250e222f9415abb73111bb09b73ab90d2d4d53f6390e0ccc1"}, + {file = "pydantic-1.10.21-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185d5f1dff1fead51766da9b2de4f3dc3b8fca39e59383c273f34a6ae254e3e2"}, + {file = "pydantic-1.10.21-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38e6d35cf7cd1727822c79e324fa0677e1a08c88a34f56695101f5ad4d5e20e5"}, + {file = "pydantic-1.10.21-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1d7c332685eafacb64a1a7645b409a166eb7537f23142d26895746f628a3149b"}, + {file = "pydantic-1.10.21-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c9b782db6f993a36092480eeaab8ba0609f786041b01f39c7c52252bda6d85f"}, + {file = "pydantic-1.10.21-cp312-cp312-win_amd64.whl", hash = "sha256:7ce64d23d4e71d9698492479505674c5c5b92cda02b07c91dfc13633b2eef805"}, + {file = "pydantic-1.10.21-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0067935d35044950be781933ab91b9a708eaff124bf860fa2f70aeb1c4be7212"}, + {file = "pydantic-1.10.21-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5e8148c2ce4894ce7e5a4925d9d3fdce429fb0e821b5a8783573f3611933a251"}, + {file = "pydantic-1.10.21-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4973232c98b9b44c78b1233693e5e1938add5af18042f031737e1214455f9b8"}, + {file = "pydantic-1.10.21-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:662bf5ce3c9b1cef32a32a2f4debe00d2f4839fefbebe1d6956e681122a9c839"}, + {file = "pydantic-1.10.21-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:98737c3ab5a2f8a85f2326eebcd214510f898881a290a7939a45ec294743c875"}, + {file 
= "pydantic-1.10.21-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0bb58bbe65a43483d49f66b6c8474424d551a3fbe8a7796c42da314bac712738"}, + {file = "pydantic-1.10.21-cp313-cp313-win_amd64.whl", hash = "sha256:e622314542fb48542c09c7bd1ac51d71c5632dd3c92dc82ede6da233f55f4848"}, + {file = "pydantic-1.10.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d356aa5b18ef5a24d8081f5c5beb67c0a2a6ff2a953ee38d65a2aa96526b274f"}, + {file = "pydantic-1.10.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08caa8c0468172d27c669abfe9e7d96a8b1655ec0833753e117061febaaadef5"}, + {file = "pydantic-1.10.21-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c677aa39ec737fec932feb68e4a2abe142682f2885558402602cd9746a1c92e8"}, + {file = "pydantic-1.10.21-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:79577cc045d3442c4e845df53df9f9202546e2ba54954c057d253fc17cd16cb1"}, + {file = "pydantic-1.10.21-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:b6b73ab347284719f818acb14f7cd80696c6fdf1bd34feee1955d7a72d2e64ce"}, + {file = "pydantic-1.10.21-cp37-cp37m-win_amd64.whl", hash = "sha256:46cffa24891b06269e12f7e1ec50b73f0c9ab4ce71c2caa4ccf1fb36845e1ff7"}, + {file = "pydantic-1.10.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:298d6f765e3c9825dfa78f24c1efd29af91c3ab1b763e1fd26ae4d9e1749e5c8"}, + {file = "pydantic-1.10.21-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2f4a2305f15eff68f874766d982114ac89468f1c2c0b97640e719cf1a078374"}, + {file = "pydantic-1.10.21-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35b263b60c519354afb3a60107d20470dd5250b3ce54c08753f6975c406d949b"}, + {file = "pydantic-1.10.21-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e23a97a6c2f2db88995496db9387cd1727acdacc85835ba8619dce826c0b11a6"}, + {file = "pydantic-1.10.21-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:3c96fed246ccc1acb2df032ff642459e4ae18b315ecbab4d95c95cfa292e8517"}, + {file = "pydantic-1.10.21-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b92893ebefc0151474f682e7debb6ab38552ce56a90e39a8834734c81f37c8a9"}, + {file = "pydantic-1.10.21-cp38-cp38-win_amd64.whl", hash = "sha256:b8460bc256bf0de821839aea6794bb38a4c0fbd48f949ea51093f6edce0be459"}, + {file = "pydantic-1.10.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d387940f0f1a0adb3c44481aa379122d06df8486cc8f652a7b3b0caf08435f7"}, + {file = "pydantic-1.10.21-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:266ecfc384861d7b0b9c214788ddff75a2ea123aa756bcca6b2a1175edeca0fe"}, + {file = "pydantic-1.10.21-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61da798c05a06a362a2f8c5e3ff0341743e2818d0f530eaac0d6898f1b187f1f"}, + {file = "pydantic-1.10.21-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a621742da75ce272d64ea57bd7651ee2a115fa67c0f11d66d9dcfc18c2f1b106"}, + {file = "pydantic-1.10.21-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9e3e4000cd54ef455694b8be9111ea20f66a686fc155feda1ecacf2322b115da"}, + {file = "pydantic-1.10.21-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f198c8206640f4c0ef5a76b779241efb1380a300d88b1bce9bfe95a6362e674d"}, + {file = "pydantic-1.10.21-cp39-cp39-win_amd64.whl", hash = "sha256:e7f0cda108b36a30c8fc882e4fc5b7eec8ef584aa43aa43694c6a7b274fb2b56"}, + {file = "pydantic-1.10.21-py3-none-any.whl", hash = "sha256:db70c920cba9d05c69ad4a9e7f8e9e83011abb2c6490e561de9ae24aee44925c"}, + 
{file = "pydantic-1.10.21.tar.gz", hash = "sha256:64b48e2b609a6c22178a56c408ee1215a7206077ecb8a193e2fda31858b2362a"}, ] [package.dependencies] @@ -1398,13 +1349,13 @@ toml = ["tomli (>=1.2.3)"] [[package]] name = "pygments" -version = "2.18.0" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -1412,29 +1363,28 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylint" -version = "3.3.2" +version = "3.3.6" description = "python code static checker" optional = false python-versions = ">=3.9.0" files = [ - {file = "pylint-3.3.2-py3-none-any.whl", hash = "sha256:77f068c287d49b8683cd7c6e624243c74f92890f767f106ffa1ddf3c0a54cb7a"}, - {file = "pylint-3.3.2.tar.gz", hash = "sha256:9ec054ec992cd05ad30a6df1676229739a73f8feeabf3912c995d17601052b01"}, + {file = "pylint-3.3.6-py3-none-any.whl", hash = "sha256:8b7c2d3e86ae3f94fb27703d521dd0b9b6b378775991f504d7c3a6275aa0a6a6"}, + {file = "pylint-3.3.6.tar.gz", hash = "sha256:b634a041aac33706d56a0d217e6587228c66427e20ec21a019bc4cdee48c040a"}, ] [package.dependencies] -astroid = ">=3.3.5,<=3.4.0-dev0" +astroid = ">=3.3.8,<=3.4.0.dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +isort = ">=4.2.5,<5.13 || >5.13,<7" mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +platformdirs = ">=2.2" +tomli = {version = ">=1.1", markers = "python_version < \"3.11\""} tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] spelling = ["pyenchant (>=3.2,<4.0)"] @@ -1471,13 +1421,13 @@ six = ">=1.5" [[package]] name = "python-dotenv" -version = "1.0.1" +version = "1.1.0" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, + {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"}, + {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"}, ] [package.extras] @@ -1485,13 +1435,13 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2024.2" +version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", 
hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] [[package]] @@ -1558,18 +1508,19 @@ files = [ [[package]] name = "referencing" -version = "0.35.1" +version = "0.36.2" description = "JSON Referencing + Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, + {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, + {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, ] [package.dependencies] attrs = ">=22.2.0" rpds-py = ">=0.7.0" +typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} [[package]] name = "requests" @@ -1607,114 +1558,125 @@ docutils = ">=0.11,<1.0" [[package]] name = "rpds-py" -version = "0.22.3" +version = "0.24.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" files = [ - {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, - {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, - {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, - {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", 
hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, - {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, - {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, - {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, - {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, - {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, - {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, - {file = 
"rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, - {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, - {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, - {file = 
"rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, - {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, + {file = "rpds_py-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:006f4342fe729a368c6df36578d7a348c7c716be1da0a1a0f86e3021f8e98724"}, + {file = "rpds_py-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2d53747da70a4e4b17f559569d5f9506420966083a31c5fbd84e764461c4444b"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8acd55bd5b071156bae57b555f5d33697998752673b9de554dd82f5b5352727"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7e80d375134ddb04231a53800503752093dbb65dad8dabacce2c84cccc78e964"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60748789e028d2a46fc1c70750454f83c6bdd0d05db50f5ae83e2db500b34da5"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e1daf5bf6c2be39654beae83ee6b9a12347cb5aced9a29eecf12a2d25fff664"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b221c2457d92a1fb3c97bee9095c874144d196f47c038462ae6e4a14436f7bc"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:66420986c9afff67ef0c5d1e4cdc2d0e5262f53ad11e4f90e5e22448df485bf0"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:43dba99f00f1d37b2a0265a259592d05fcc8e7c19d140fe51c6e6f16faabeb1f"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a88c0d17d039333a41d9bf4616bd062f0bd7aa0edeb6cafe00a2fc2a804e944f"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc31e13ce212e14a539d430428cd365e74f8b2d534f8bc22dd4c9c55b277b875"}, + {file = "rpds_py-0.24.0-cp310-cp310-win32.whl", hash = "sha256:fc2c1e1b00f88317d9de6b2c2b39b012ebbfe35fe5e7bef980fd2a91f6100a07"}, + {file = "rpds_py-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0145295ca415668420ad142ee42189f78d27af806fcf1f32a18e51d47dd2052"}, + {file = "rpds_py-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2d3ee4615df36ab8eb16c2507b11e764dcc11fd350bbf4da16d09cda11fcedef"}, + {file = "rpds_py-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf86f72d705fc2ef776bb7dd9e5fbba79d7e1f3e258bf9377f8204ad0fc1c51e"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c43583ea8517ed2e780a345dd9960896afc1327e8cf3ac8239c167530397440d"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cd031e63bc5f05bdcda120646a0d32f6d729486d0067f09d79c8db5368f4586"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34d90ad8c045df9a4259c47d2e16a3f21fdb396665c94520dbfe8766e62187a4"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e838bf2bb0b91ee67bf2b889a1a841e5ecac06dd7a2b1ef4e6151e2ce155c7ae"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04ecf5c1ff4d589987b4d9882872f80ba13da7d42427234fce8f22efb43133bc"}, + {file = 
"rpds_py-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:630d3d8ea77eabd6cbcd2ea712e1c5cecb5b558d39547ac988351195db433f6c"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ebcb786b9ff30b994d5969213a8430cbb984cdd7ea9fd6df06663194bd3c450c"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:174e46569968ddbbeb8a806d9922f17cd2b524aa753b468f35b97ff9c19cb718"}, + {file = "rpds_py-0.24.0-cp311-cp311-win32.whl", hash = "sha256:5ef877fa3bbfb40b388a5ae1cb00636a624690dcb9a29a65267054c9ea86d88a"}, + {file = "rpds_py-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:e274f62cbd274359eff63e5c7e7274c913e8e09620f6a57aae66744b3df046d6"}, + {file = "rpds_py-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205"}, + {file = "rpds_py-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56"}, + {file = "rpds_py-0.24.0-cp312-cp312-win32.whl", hash = "sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30"}, + {file = "rpds_py-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034"}, + {file = "rpds_py-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2d8e4508e15fc05b31285c4b00ddf2e0eb94259c2dc896771966a163122a0c"}, + {file = "rpds_py-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f00c16e089282ad68a3820fd0c831c35d3194b7cdc31d6e469511d9bffc535c"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951cc481c0c395c4a08639a469d53b7d4afa252529a085418b82a6b43c45c240"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9ca89938dff18828a328af41ffdf3902405a19f4131c88e22e776a8e228c5a8"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed0ef550042a8dbcd657dfb284a8ee00f0ba269d3f2286b0493b15a5694f9fe8"}, + {file = 
"rpds_py-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2356688e5d958c4d5cb964af865bea84db29971d3e563fb78e46e20fe1848b"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78884d155fd15d9f64f5d6124b486f3d3f7fd7cd71a78e9670a0f6f6ca06fb2d"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a4a535013aeeef13c5532f802708cecae8d66c282babb5cd916379b72110cf7"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:84e0566f15cf4d769dade9b366b7b87c959be472c92dffb70462dd0844d7cbad"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:823e74ab6fbaa028ec89615ff6acb409e90ff45580c45920d4dfdddb069f2120"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c61a2cb0085c8783906b2f8b1f16a7e65777823c7f4d0a6aaffe26dc0d358dd9"}, + {file = "rpds_py-0.24.0-cp313-cp313-win32.whl", hash = "sha256:60d9b630c8025b9458a9d114e3af579a2c54bd32df601c4581bd054e85258143"}, + {file = "rpds_py-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:6eea559077d29486c68218178ea946263b87f1c41ae7f996b1f30a983c476a5a"}, + {file = "rpds_py-0.24.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d09dc82af2d3c17e7dd17120b202a79b578d79f2b5424bda209d9966efeed114"}, + {file = "rpds_py-0.24.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5fc13b44de6419d1e7a7e592a4885b323fbc2f46e1f22151e3a8ed3b8b920405"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c347a20d79cedc0a7bd51c4d4b7dbc613ca4e65a756b5c3e57ec84bd43505b47"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20f2712bd1cc26a3cc16c5a1bfee9ed1abc33d4cdf1aabd297fe0eb724df4272"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad911555286884be1e427ef0dc0ba3929e6821cbeca2194b13dc415a462c7fd"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aeb3329c1721c43c58cae274d7d2ca85c1690d89485d9c63a006cb79a85771a"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a0f156e9509cee987283abd2296ec816225145a13ed0391df8f71bf1d789e2d"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa6800adc8204ce898c8a424303969b7aa6a5e4ad2789c13f8648739830323b7"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a18fc371e900a21d7392517c6f60fe859e802547309e94313cd8181ad9db004d"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9168764133fd919f8dcca2ead66de0105f4ef5659cbb4fa044f7014bed9a1797"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f6e3cec44ba05ee5cbdebe92d052f69b63ae792e7d05f1020ac5e964394080c"}, + {file = "rpds_py-0.24.0-cp313-cp313t-win32.whl", hash = "sha256:8ebc7e65ca4b111d928b669713865f021b7773350eeac4a31d3e70144297baba"}, + {file = "rpds_py-0.24.0-cp313-cp313t-win_amd64.whl", hash = "sha256:675269d407a257b8c00a6b58205b72eec8231656506c56fd429d924ca00bb350"}, + {file = "rpds_py-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a36b452abbf29f68527cf52e181fced56685731c86b52e852053e38d8b60bc8d"}, + {file = "rpds_py-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b3b397eefecec8e8e39fa65c630ef70a24b09141a6f9fc17b3c3a50bed6b50e"}, + {file = 
"rpds_py-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdabcd3beb2a6dca7027007473d8ef1c3b053347c76f685f5f060a00327b8b65"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5db385bacd0c43f24be92b60c857cf760b7f10d8234f4bd4be67b5b20a7c0b6b"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8097b3422d020ff1c44effc40ae58e67d93e60d540a65649d2cdaf9466030791"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493fe54318bed7d124ce272fc36adbf59d46729659b2c792e87c3b95649cdee9"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8aa362811ccdc1f8dadcc916c6d47e554169ab79559319ae9fae7d7752d0d60c"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d8f9a6e7fd5434817526815f09ea27f2746c4a51ee11bb3439065f5fc754db58"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8205ee14463248d3349131bb8099efe15cd3ce83b8ef3ace63c7e976998e7124"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:921ae54f9ecba3b6325df425cf72c074cd469dea843fb5743a26ca7fb2ccb149"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32bab0a56eac685828e00cc2f5d1200c548f8bc11f2e44abf311d6b548ce2e45"}, + {file = "rpds_py-0.24.0-cp39-cp39-win32.whl", hash = "sha256:f5c0ed12926dec1dfe7d645333ea59cf93f4d07750986a586f511c0bc61fe103"}, + {file = "rpds_py-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:afc6e35f344490faa8276b5f2f7cbf71f88bc2cda4328e00553bd451728c571f"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:619ca56a5468f933d940e1bf431c6f4e13bef8e688698b067ae68eb4f9b30e3a"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b28e5122829181de1898c2c97f81c0b3246d49f585f22743a1246420bb8d399"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e5ab32cf9eb3647450bc74eb201b27c185d3857276162c101c0f8c6374e098"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:208b3a70a98cf3710e97cabdc308a51cd4f28aa6e7bb11de3d56cd8b74bab98d"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbc4362e06f950c62cad3d4abf1191021b2ffaf0b31ac230fbf0526453eee75e"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebea2821cdb5f9fef44933617be76185b80150632736f3d76e54829ab4a3b4d1"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4df06c35465ef4d81799999bba810c68d29972bf1c31db61bfdb81dd9d5bb"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3aa13bdf38630da298f2e0d77aca967b200b8cc1473ea05248f6c5e9c9bdb44"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:041f00419e1da7a03c46042453598479f45be3d787eb837af382bfc169c0db33"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8754d872a5dfc3c5bf9c0e059e8107451364a30d9fd50f1f1a85c4fb9481164"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:896c41007931217a343eff197c34513c154267636c8056fb409eafd494c3dcdc"}, + {file = 
"rpds_py-0.24.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:92558d37d872e808944c3c96d0423b8604879a3d1c86fdad508d7ed91ea547d5"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f9e0057a509e096e47c87f753136c9b10d7a91842d8042c2ee6866899a717c0d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d6e109a454412ab82979c5b1b3aee0604eca4bbf9a02693bb9df027af2bfa91a"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1c892b1ec1f8cbd5da8de287577b455e388d9c328ad592eabbdcb6fc93bee5"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c39438c55983d48f4bb3487734d040e22dad200dab22c41e331cee145e7a50d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d7e8ce990ae17dda686f7e82fd41a055c668e13ddcf058e7fb5e9da20b57793"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ea7f4174d2e4194289cb0c4e172d83e79a6404297ff95f2875cf9ac9bced8ba"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb2954155bb8f63bb19d56d80e5e5320b61d71084617ed89efedb861a684baea"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04f2b712a2206e13800a8136b07aaedc23af3facab84918e7aa89e4be0260032"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:eda5c1e2a715a4cbbca2d6d304988460942551e4e5e3b7457b50943cd741626d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:9abc80fe8c1f87218db116016de575a7998ab1629078c90840e8d11ab423ee25"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a727fd083009bc83eb83d6950f0c32b3c94c8b80a9b667c87f4bd1274ca30ba"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e0f3ef95795efcd3b2ec3fe0a5bcfb5dadf5e3996ea2117427e524d4fbf309c6"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2c13777ecdbbba2077670285dd1fe50828c8742f6a4119dbef6f83ea13ad10fb"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e8d804c2ccd618417e96720ad5cd076a86fa3f8cb310ea386a3e6229bae7d1"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd822f019ccccd75c832deb7aa040bb02d70a92eb15a2f16c7987b7ad4ee8d83"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0047638c3aa0dbcd0ab99ed1e549bbf0e142c9ecc173b6492868432d8989a046"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5b66d1b201cc71bc3081bc2f1fc36b0c1f268b773e03bbc39066651b9e18391"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbcbb6db5582ea33ce46a5d20a5793134b5365110d84df4e30b9d37c6fd40ad3"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63981feca3f110ed132fd217bf7768ee8ed738a55549883628ee3da75bb9cb78"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3a55fc10fdcbf1a4bd3c018eea422c52cf08700cf99c28b5cb10fe97ab77a0d3"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:c30ff468163a48535ee7e9bf21bd14c7a81147c0e58a36c1078289a8ca7af0bd"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:369d9c6d4c714e36d4a03957b4783217a3ccd1e222cdd67d464a3a479fc17796"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:24795c099453e3721fda5d8ddd45f5dfcc8e5a547ce7b8e9da06fecc3832e26f"}, + {file = "rpds_py-0.24.0.tar.gz", hash = "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e"}, ] [[package]] @@ -1767,19 +1729,19 @@ files = [ [[package]] name = "smartnoise-sql" -version = "1.0.4" +version = "1.0.6" description = "Differentially Private SQL Queries" optional = false -python-versions = "<3.13,>=3.8" +python-versions = "<3.14,>=3.8" files = [ - {file = "smartnoise-sql-1.0.4.tar.gz", hash = "sha256:93b5265df51e0e43388613a2ffea5a52b8d3772908f3ac78bd8bb2c7d121f01f"}, - {file = "smartnoise_sql-1.0.4-py3-none-any.whl", hash = "sha256:9dffc020b9e96ccd99ac746933e9a9ff07222505836f6ea21f3f0dca02917a7f"}, + {file = "smartnoise_sql-1.0.6-py3-none-any.whl", hash = "sha256:95f29f3eef7527d99d9f0ddbc89e4a6ce069c2748c3e729b9efb5288dcba3b1c"}, + {file = "smartnoise_sql-1.0.6.tar.gz", hash = "sha256:680d909fefd67453ed4d33d63be4cae0ceecc15987a3ea9c7520eff66d059d8e"}, ] [package.dependencies] antlr4-python3-runtime = "4.9.3" -graphviz = ">=0.17,<0.18" -opendp = ">=0.8.0,<0.11.0" +graphviz = ">=0.17,<1.0" +opendp = ">=0.8.0,<0.13.0" pandas = ">=2.0.1,<3.0.0" PyYAML = ">=6.0.1,<7.0.0" sqlalchemy = ">=2.0.0,<3.0.0" @@ -1812,7 +1774,6 @@ babel = ">=2.9" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} docutils = ">=0.14,<0.20" imagesize = ">=1.3" -importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" packaging = ">=21.0" Pygments = ">=2.12" @@ -1974,80 +1935,80 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "2.0.36" +version = "2.0.40" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, - 
{file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, - {file = 
"SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, - {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, - {file = "sqlalchemy-2.0.36.tar.gz", hash = 
"sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win32.whl", hash = "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win_amd64.whl", hash = "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = 
"sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win32.whl", hash = 
"sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win_amd64.whl", hash = "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win32.whl", hash = "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win_amd64.whl", hash = "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870"}, + {file = "sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a"}, + {file = "sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] @@ -2058,7 +2019,7 @@ mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] @@ -2234,24 +2195,24 @@ files = [ [[package]] name = "types-pyyaml" -version = 
"6.0.12.20240917" +version = "6.0.12.20250402" description = "Typing stubs for PyYAML" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, - {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, + {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"}, + {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"}, ] [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [[package]] @@ -2271,24 +2232,24 @@ typing-extensions = ">=3.7.4" [[package]] name = "tzdata" -version = "2024.2" +version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] [[package]] name = "urllib3" -version = "2.2.3" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] @@ -2299,13 +2260,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.28.0" +version = "20.30.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, - {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, + {file = "virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6"}, + {file = "virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8"}, ] [package.dependencies] @@ -2329,28 +2290,97 @@ files = [ ] [[package]] -name = "zipp" -version = "3.21.0" -description = "Backport of pathlib-compatible object wrapper for zip files" +name = "wrapt" +version = "1.17.2" +description = "Module for decorators, wrappers and monkey patching." optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, + {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, + {file = 
"wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, + {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, + {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, + {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, + {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, 
+ {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, + {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, + {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, + {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, + {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, + {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, + {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, + {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, + {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, + {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, + {file = "wrapt-1.17.2.tar.gz", hash = 
"sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, ] -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - [extras] docs = ["sphinx-rtd-theme", "sphinxcontrib-napoleon"] [metadata] lock-version = "2.0" -python-versions = "^3.9,<3.13" -content-hash = "c87bf05633a11e794207a9c7b44893f331b3236ecc51b81c3a7aab97c82275db" +python-versions = ">=3.10,<3.14" +content-hash = "70f8f1b1a164704cecab89ec9d5d8e1877d8dd0a7d1b0488dc0e1c3b4732b82a" diff --git a/pyproject.toml b/pyproject.toml index dccf8a08..2aa5bc28 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,11 +16,11 @@ homepage = "https://github.com/alan-turing-institute/sqlsynthgen" documentation = "https://sqlsynthgen.readthedocs.io/en/stable/" [tool.poetry.dependencies] -python = "^3.9,<3.13" +python = ">=3.10,<3.14" pydantic = {extras = ["dotenv"], version = "^1.10.2"} psycopg2-binary = "^2.9.5" sqlalchemy-utils = "^0.41.2" -mimesis = "^6.1.1" +mimesis = "^18.0.0" typer = "^0.9.0" pyyaml = "^6.0" sqlalchemy = "^2" diff --git a/sqlsynthgen/create.py b/sqlsynthgen/create.py index cfd55dbe..e26f1534 100644 --- a/sqlsynthgen/create.py +++ b/sqlsynthgen/create.py @@ -1,7 +1,7 @@ """Functions and classes to create and populate the target database.""" from collections import Counter import random -from typing import Any, Generator, Mapping, Sequence, Tuple +from typing import Any, Generator, Iterable, Iterator, Mapping, Sequence, Tuple from sqlalchemy import Connection, insert from sqlalchemy.exc import IntegrityError @@ -116,47 +116,84 @@ def create_db_data( return row_counts -def _populate_story( - story: Story, - table_dict: Mapping[str, Table], - table_generator_dict: Mapping[str, TableGenerator], - dst_conn: Connection, -) -> RowCounts: - """Write to the database all the rows created by the given story.""" - # Loop over the rows generated by the story, insert them into their - # respective tables. Ideally this would say - # `for table_name, provided_values in story:` - # but we have to loop more manually to be able to use the `send` function. - row_counts: Counter[str] = Counter() - try: - table_name, provided_values = next(story) +class StoryIterator: + def __init__(self, + stories: Iterable[tuple[str, Story]], + table_dict: Mapping[str, Table], + table_generator_dict: Mapping[str, TableGenerator], + dst_conn: Connection, + ): + self._stories: Iterator[tuple[str, Story]] = iter(stories) + self._table_dict: Mapping[str, Table] = table_dict + self._table_generator_dict: Mapping[str, TableGenerator] = table_generator_dict + self._dst_conn: Connection = dst_conn + try: + name, self._story = next(self._stories) + logger.info("Generating data for story '%s'", name) + self._table_name, self._provided_values = next(self._story) + except StopIteration: + self._table_name = None + + def is_ended(self) -> bool: + """ + Do we have another row to process? + If so, insert() can be called. + """ + return self._table_name is None + + def has_table(self, table_name: str): + """ + Do we have a row for table table_name? 
+ """ + return table_name == self._table_name + + def table_name(self) -> str: + """ + The name of the current table (or None if no more stories to process) + """ + return self._table_name + + def insert(self) -> None: + """ + Perform the insert. Call this after __init__ or next, and after checking + that is_ended returns False. + """ + table = self._table_dict[self._table_name] + if table.name in self._table_generator_dict: + table_generator = self._table_generator_dict[table.name] + default_values = table_generator(self._dst_conn, random.random) + else: + default_values = {} + insert_values = {**default_values, **self._provided_values} + stmt = insert(table).values(insert_values).return_defaults() + cursor = self._dst_conn.execute(stmt) + # We need to return all the default values etc. to the generator, + # because other parts of the story may refer to them. + if cursor.returned_defaults: + # pylint: disable=protected-access + return_values = { + str(k): v for k, v in cursor.returned_defaults._mapping.items() + } + # pylint: enable=protected-access + else: + return_values = {} + self._final_values = {**insert_values, **return_values} + + def next(self) -> None: + """ + Advance to the next table row. + """ while True: - table = table_dict[table_name] - if table.name in table_generator_dict: - table_generator = table_generator_dict[table.name] - default_values = table_generator(dst_conn, random.random) - else: - default_values = {} - insert_values = {**default_values, **provided_values} - stmt = insert(table).values(insert_values).return_defaults() - cursor = dst_conn.execute(stmt) - # We need to return all the default values etc. to the generator, - # because other parts of the story may refer to them. - if cursor.returned_defaults: - # pylint: disable=protected-access - return_values = { - str(k): v for k, v in cursor.returned_defaults._mapping.items() - } - # pylint: enable=protected-access - else: - return_values = {} - final_values = {**insert_values, **return_values} - row_counts[table_name] = row_counts.get(table_name, 0) + 1 - table_name, provided_values = story.send(final_values) - except StopIteration: - # The story has finished, it has no more rows to generate - pass - return row_counts + try: + self._table_name, self._provided_values = self._story.send(self._final_values) + return + except StopIteration: + try: + name, self._story = next(self._stories) + logger.info("Generating data for story '%s'", name) + except StopIteration: + self._table_name = None + return def populate( @@ -182,16 +219,15 @@ def populate( ], [], ) - for name, story in stories: - # Run the inserts for each story within a transaction. - logger.debug('Generating data for story "%s".', name) - with dst_conn.begin(): - row_counts += _populate_story( - story, table_dict, table_generator_dict, dst_conn - ) + story_iterator = StoryIterator(stories, table_dict, table_generator_dict, dst_conn) # Generate individual rows, table by table. for table in tables: + # Do we have a story row to enter into this table? 
+ if story_iterator.has_table(table.name): + story_iterator.insert() + row_counts[table.name] = row_counts.get(table.name, 0) + 1 + story_iterator.next() if table.name not in table_generator_dict: # We don't have a generator for this table continue @@ -205,4 +241,11 @@ def populate( stmt = insert(table).values(table_generator(dst_conn, random.random)) dst_conn.execute(stmt) row_counts[table.name] = row_counts.get(table.name, 0) + 1 + + # Insert any remaining stories + while not story_iterator.is_ended(): + story_iterator.insert() + row_counts[table.name] = row_counts.get(table.name, 0) + 1 + story_iterator.next() + return row_counts diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 4b827133..fc294a37 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -110,8 +110,8 @@ class PredefinedGenerator(Generator): Generator built from an existing config.yaml. """ SELECT_AGGREGATE_RE = re.compile(r"SELECT (.*) FROM ([A-Za-z_][A-Za-z0-9_]*)") - AS_CLAUSE = re.compile(r" *(.+) +AS +([A-Za-z_][A-Za-z0-9_]*) *") - SRC_STAT_NAME = re.compile(r"SRC_STATS\[([^]]*)\].*") + AS_CLAUSE_RE = re.compile(r" *(.+) +AS +([A-Za-z_][A-Za-z0-9_]*) *") + SRC_STAT_NAME_RE = re.compile(r"SRC_STATS\[([^]]*)\].*") def __init__(self, table_name: str, generator_object: Mapping[str, any], config: Mapping[str, any]): """ @@ -119,14 +119,19 @@ def __init__(self, table_name: str, generator_object: Mapping[str, any], config: :param config: The entire configuration. :param generator_object: The part of the configuration at tables.*.row_generators """ + logger.debug("Creating a PredefinedGenerator %s from table %s", generator_object["name"], table_name) self._table_name = table_name self._name: str = generator_object["name"] self._kwn: dict[str, str] = generator_object.get("kwargs", {}) self._src_stats_mentioned = set() for kwnv in self._kwn.values(): - ss = self.SRC_STAT_NAME.match(kwnv) + ss = self.SRC_STAT_NAME_RE.match(kwnv) if ss: - self._src_stats_mentioned.add(ss.group(1)) + ss_name = ss.group(1) + self._src_stats_mentioned.add(ss_name) + logger.debug("Found SRC_STATS reference %s", ss_name) + else: + logger.debug("Value %s does not seem to be a SRC_STATS reference", kwnv) # Need to deal with this somehow (or remove it from the schema) self._argn: list[str] = generator_object.get("args", []) self._select_aggregate_clauses = {} @@ -141,16 +146,19 @@ def __init__(self, table_name: str, generator_object: Mapping[str, any], config: sam = None if query is None else self.SELECT_AGGREGATE_RE.match(query) if sam and qname in tables and qname == sam.group(2): sacs = [ - self.AS_CLAUSE.match(clause) + self.AS_CLAUSE_RE.match(clause) for clause in sam.group(1).split(',') ] - self._select_aggregate_clauses = { + self._select_aggregate_clauses.update({ sac.group(2): sac.group(1) for sac in sacs if sac is not None - } + }) elif name in self._src_stats_mentioned: + logger.debug("Custom query %s is '%s'", name, query) self._custom_queries[name] = query + else: + logger.debug("Could not parse %s query %s", name, query) def function_name(self) -> str: return self._name @@ -220,7 +228,7 @@ def make_buckets(_cls, engine: Engine, table_name: str, column_name: str): column=column_name, )) ).first() - if result is None: + if result is None or result.stddev is None: return None return Buckets(engine, table_name, column_name, result.mean, result.stddev, result.count) @@ -337,8 +345,8 @@ def __init__(self, column: Column, engine: Engine, function_name: str): ).first() if result is None: return None - 
self._start = result.start - self._end = result.end + self._start = int(result.start) + self._end = int(result.end) def nominal_kwargs(self): return { "start": f'SRC_STATS["auto__{self._column.table.name}"]["{self._column.name}__start"]', @@ -469,10 +477,7 @@ def get_generators(self, column: Column, _engine: Engine): ct = column.type.as_generic() if not isinstance(ct, Numeric) and not isinstance(ct, Integer): return [] - return list(map(MimesisGenerator, [ - "person.weight", - "person.age", - ])) + return [MimesisGenerator("person.weight")] def fit_from_buckets(xs: list[float], ys: list[float]): @@ -544,6 +549,8 @@ def get_generators(self, column: Column, engine: Engine): column_name = column.name table_name = column.table.name buckets = Buckets.make_buckets(engine, table_name, column_name) + if buckets is None: + return [] return [ GaussianGenerator(table_name, column_name, buckets), UniformGenerator(table_name, column_name, buckets), diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 913f5cf1..e3d1913f 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -355,9 +355,16 @@ class GeneratorCmd(DbCmd): prompt = "(generatorconf) " file = None - def make_table_entry(self, table_name: str) -> TableEntry: + PROPOSE_SOURCE_SAMPLE_TEXT = "Sample of actual source data: {0}..." + PROPOSE_GENERATOR_SAMPLE_TEXT = "{index}. {name}: {fit} {sample} ..." + + def make_table_entry(self, table_name: str) -> TableEntry | None: tables = self.config.get("tables", {}) table: str = tables.get(table_name, {}) + if table.get("ignore", False): + return None + if table.get("vocabulary_table", False): + return None metadata_table = self.metadata.tables[table_name] columns = frozenset(metadata_table.columns.keys()) col2gen: dict[str, Generator] = {} @@ -591,8 +598,42 @@ def do_columns(self, _arg): "Report the column names" self.columnize(self.table_metadata().columns.keys()) - def do_next(self, _arg): - "Go to the next generator" + def get_table_index(self, table_name: str) -> int | None: + for n, entry in enumerate(self.table_entries): + if entry.name == table_name: + return n + return None + + def get_generator_index(self, table_index, column_name): + entry: GeneratorCmdTableEntry = self.table_entries[table_index] + for n, gen in enumerate(entry.generators): + if gen.column == column_name: + return n + return None + + def do_next(self, arg): + """ + Go to the next generator. + Or, go to a named table: 'next tablename'. + Or go to a column: 'next tablename.columnname'. 
+ """ + if arg: + parts = arg.split(".", 1) + table_index = self.get_table_index(parts[0]) + if table_index is None: + self.print("No such (non-vocabulary, non-ignored) table name {0}", parts[0]) + return + gen_index = None + if 1 < len(parts): + gen_index = self.get_generator_index(table_index, parts[1]) + if gen_index is None: + self.print("we cannot set the generator for column {0}", parts[1]) + return + self.set_table_index(table_index) + if gen_index is not None: + self.generator_index = gen_index + self.set_prompt() + return table = self.get_table() if table is None: self.print("No more tables") @@ -753,7 +794,7 @@ def do_propose(self, arg): limit = 5 gens = self.get_generator_proposals() sample = self.get_column_data(limit) - self.print("Sample of actual source data: {0}...", ",".join(sample)) + self.print(self.PROPOSE_SOURCE_SAMPLE_TEXT, ",".join(sample)) for index, gen in enumerate(gens): fit = gen.fit() if fit is None: @@ -763,7 +804,7 @@ def do_propose(self, arg): else: fit_s = f"(fit: {fit:.0f})" self.print( - "{index}. {name}: {fit} {sample} ...", + self.PROPOSE_GENERATOR_SAMPLE_TEXT, index=index + 1, name=gen.function_name(), fit=fit_s, diff --git a/tests/test_create.py b/tests/test_create.py index 5005758a..a43d060a 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -12,7 +12,7 @@ from sqlsynthgen.base import FileUploader, TableGenerator from sqlsynthgen.create import ( Story, - _populate_story, + StoryIterator, create_db_data, create_db_tables, create_db_vocab, @@ -215,6 +215,9 @@ def my_story() -> Story: self.assertEqual(1, first_row["someval"]) self.assertEqual(8, first_row["otherval"]) + story_iterator = StoryIterator([my_story()], dict(self.metadata.tables), {}, conn) with self.engine.connect() as conn: with conn.begin(): - _populate_story(my_story(), dict(self.metadata.tables), {}, conn) + while not story_iterator.is_ended(): + story_iterator.insert() + story_iterator.next() diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 3cee2b21..fe726ee0 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -1,4 +1,5 @@ """Tests for the base module.""" +import copy from sqlalchemy import MetaData, select from sqlalchemy.orm import declarative_base @@ -295,3 +296,54 @@ def test_set_generator_choice(self): "name": f"auto__{TABLE}__{COLUMN}", "query": f"SELECT {COLUMN} AS value FROM {TABLE} GROUP BY value ORDER BY COUNT({COLUMN}) DESC", }) + + def test_old_generators_remain(self): + """ Test that we can set one generator and keep an old one. 
""" + metadata = MetaData() + metadata.reflect(self.engine) + config = { + "tables": { + "string": { + "row_generators": [{ + "name": "dist_gen.normal", + "columns_assigned": ["frequency"], + "kwargs": { + "mean": 'SRC_STATS["auto__string"]["mean__frequency"]', + "sd": 'SRC_STATS["auto__string"]["stddev__frequency"]', + }, + }] + } + }, + "src-stats": [{ + "name": "auto__string", + "query": 'SELECT AVG(frequency) AS mean__frequency, STDDEV(frequency) AS stddev__frequency FROM string', + }] + } + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, copy.deepcopy(config)) as gc: + TABLE = "model" + COLUMN = "name" + GENERATOR = "person.first_name" + gc.do_next(f"{TABLE}.{COLUMN}") + gc.do_propose("") + proposals = gc.get_proposals() + gc.do_set(str(proposals[f"generic.{GENERATOR}"][0])) + gc.do_quit("") + self.assertEqual(len(gc.config["tables"][TABLE]["row_generators"]), 1) + self.assertDictEqual( + gc.config["tables"][TABLE]["row_generators"][0], + {"name": f"generic.{GENERATOR}", "columns_assigned": [COLUMN]}, + ) + row_gens = gc.config["tables"]["string"]["row_generators"] + self.assertEqual(len(row_gens), 1) + row_gen = row_gens[0] + self.assertEqual(row_gen["name"], "dist_gen.normal") + self.assertListEqual(row_gen["columns_assigned"], ["frequency"]) + self.assertDictEqual(row_gen["kwargs"], { + "mean": 'SRC_STATS["auto__string"]["mean__frequency"]', + "sd": 'SRC_STATS["auto__string"]["stddev__frequency"]', + }) + self.assertEqual(len(gc.config["src-stats"]), 1) + self.assertDictEqual(gc.config["src-stats"][0], { + "name": f"auto__string", + "query": f"SELECT AVG(frequency) AS mean__frequency, STDDEV(frequency) AS stddev__frequency FROM string", + }) From 39a7a7f516dc318c98d16b8e193a24242261c04a Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 16 Apr 2025 18:39:46 +0100 Subject: [PATCH 56/85] configure-generators writes out existing queries to config.yaml --- sqlsynthgen/generators.py | 17 +++++++++-------- sqlsynthgen/interactive.py | 8 +------- 2 files changed, 10 insertions(+), 15 deletions(-) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index fc294a37..af219b4e 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -111,7 +111,7 @@ class PredefinedGenerator(Generator): """ SELECT_AGGREGATE_RE = re.compile(r"SELECT (.*) FROM ([A-Za-z_][A-Za-z0-9_]*)") AS_CLAUSE_RE = re.compile(r" *(.+) +AS +([A-Za-z_][A-Za-z0-9_]*) *") - SRC_STAT_NAME_RE = re.compile(r"SRC_STATS\[([^]]*)\].*") + SRC_STAT_NAME_RE = re.compile(r'SRC_STATS\["([^]]*)"\].*') def __init__(self, table_name: str, generator_object: Mapping[str, any], config: Mapping[str, any]): """ @@ -136,15 +136,17 @@ def __init__(self, table_name: str, generator_object: Mapping[str, any], config: self._argn: list[str] = generator_object.get("args", []) self._select_aggregate_clauses = {} self._custom_queries = {} - tables = config.get("tables", {}) for sstat in config.get("src-stats", []): name: str = sstat["name"] dpq = sstat.get("dp-query", None) query = sstat.get("query", dpq) #... should not combine these probably? 
- if query and name.startswith("auto__"): - qname = name[6:] + if name in self._src_stats_mentioned: + logger.debug("Found a src-stats entry for %s", name) + # This query is one that this generator is interested in sam = None if query is None else self.SELECT_AGGREGATE_RE.match(query) - if sam and qname in tables and qname == sam.group(2): + # sam.group(2) is the table name from the FROM clause of the query + if sam and name == f"auto__{sam.group(2)}": + # name is auto__{table_name}, so it's a select_aggregate, so we split up its clauses sacs = [ self.AS_CLAUSE_RE.match(clause) for clause in sam.group(1).split(',') @@ -154,11 +156,10 @@ def __init__(self, table_name: str, generator_object: Mapping[str, any], config: for sac in sacs if sac is not None }) - elif name in self._src_stats_mentioned: + else: + # some other name, so must be a custom query logger.debug("Custom query %s is '%s'", name, query) self._custom_queries[name] = query - else: - logger.debug("Could not parse %s query %s", name, query) def function_name(self) -> str: return self._name diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index e3d1913f..2cc5b999 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -398,7 +398,7 @@ def make_table_entry(self, table_name: str) -> TableEntry | None: for name, col in metadata_table.columns.items(): gen = col2gen.get(name, None) generator_infos.append(GeneratorInfo( - column=name, + column=str(name), is_primary_key=col.primary_key, old_gen=gen, new_gen=gen, @@ -488,12 +488,6 @@ def set_prompt(self): column=column, ) - def set_generator(self, generator: str): - if self.table_index < len(self.table_entries): - entry = self.table_entries[self.table_index] - if self.generator_index < len(entry.generators): - entry.generators[self.generator_index] = generator - def _remove_auto_src_stats(self) -> list[dict[str, any]]: src_stats = self.config.get("src-stats", []) new_src_stats = [] From 2f6ef58e016ff763aacdba0667df33b43cb6532f Mon Sep 17 00:00:00 2001 From: Tim Band Date: Mon, 21 Apr 2025 12:36:17 +0100 Subject: [PATCH 57/85] select aggregate clauses merge --- sqlsynthgen/interactive.py | 29 +++++++++------- tests/test_interactive.py | 69 ++++++++++++++++++++++++++++++++++++-- 2 files changed, 84 insertions(+), 14 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 2cc5b999..7686d55c 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -13,6 +13,9 @@ logger = logging.getLogger(__name__) +def or_default(v, d): + return d if v is None else v + class TableType(Enum): NORMAL = "normal" IGNORE = "ignore" @@ -503,14 +506,10 @@ def _copy_entries(self) -> None: tes: list[GeneratorCmdTableEntry] = self.table_entries for entry in tes: rgs = [] + new_gens: list[Generator] = [] for generator in entry.generators: if generator.new_gen is not None: - sacs = generator.new_gen.select_aggregate_clauses() - if sacs: - src_stats.append({ - "name": f"auto__{entry.name}", - "query": self._get_aggregate_query(generator.new_gen, entry.name), - }) + new_gens.append(generator.new_gen) cqs = generator.new_gen.custom_queries() for cq_key, cq in cqs.items(): src_stats.append({ @@ -527,6 +526,12 @@ def _copy_entries(self) -> None: rgs.append(rg) if entry.name not in tables: tables[entry.name] = {} + aq = self._get_aggregate_query(new_gens, entry.name) + if aq: + src_stats.append({ + "name": f"auto__{entry.name}", + "query": aq, + }) if rgs: tables[entry.name]["row_generators"] = rgs elif "row_generators" in 
tables[entry.name]: @@ -725,14 +730,14 @@ def print_custom_queries(self, gen: Generator) -> None: for cq_key, cq in cqs.items(): self.print("{0}; providing the following values: {1}", cq, cq_key2args[cq_key]) - def _get_aggregate_query(self, gen: Generator, table_name: str) -> str | None: - sacs = gen.select_aggregate_clauses() - if not sacs: - return None + def _get_aggregate_query(self, gens: list[Generator], table_name: str) -> str | None: clauses = [ f"{q} AS {n}" - for n, q in sacs.items() + for gen in gens + for n, q in or_default(gen.select_aggregate_clauses(), {}).items() ] + if not clauses: + return None return f"SELECT {', '.join(clauses)} FROM {table_name}" def print_select_aggregate_query(self, table_name, gen: Generator) -> None: @@ -755,7 +760,7 @@ def print_select_aggregate_query(self, table_name, gen: Generator) -> None: logger.warning("actual_kwargs for %s does not report %s", gen.function_name(), ak) else: logger.warning('nominal_kwargs for %s does not have a value SRC_STATS["auto__%s"]["%s"]', gen.function_name(), table_name, n) - select_q = self._get_aggregate_query(gen, table_name) + select_q = self._get_aggregate_query([gen], table_name) self.print("{0}; providing the following values: {1}", select_q, vals) def get_column_data(self, count: int, to_str=repr, min_length: int = 0): diff --git a/tests/test_interactive.py b/tests/test_interactive.py index fe726ee0..7a81c9a0 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -1,5 +1,6 @@ """Tests for the base module.""" import copy +import re from sqlalchemy import MetaData, select from sqlalchemy.orm import declarative_base @@ -344,6 +345,70 @@ def test_old_generators_remain(self): }) self.assertEqual(len(gc.config["src-stats"]), 1) self.assertDictEqual(gc.config["src-stats"][0], { - "name": f"auto__string", - "query": f"SELECT AVG(frequency) AS mean__frequency, STDDEV(frequency) AS stddev__frequency FROM string", + "name": "auto__string", + "query": "SELECT AVG(frequency) AS mean__frequency, STDDEV(frequency) AS stddev__frequency FROM string", + }) + + def test_aggregate_queries_merge(self): + """ + Test that we can set a generator that requires select aggregate clauses + and keep an old one, resulting in a merged query. 
+ """ + metadata = MetaData() + metadata.reflect(self.engine) + config = { + "tables": { + "string": { + "row_generators": [{ + "name": "dist_gen.normal", + "columns_assigned": ["frequency"], + "kwargs": { + "mean": 'SRC_STATS["auto__string"]["mean__frequency"]', + "sd": 'SRC_STATS["auto__string"]["stddev__frequency"]', + }, + }] + } + }, + "src-stats": [{ + "name": "auto__string", + "query": 'SELECT AVG(frequency) AS mean__frequency, STDDEV(frequency) AS stddev__frequency FROM string', + }] + } + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, copy.deepcopy(config)) as gc: + COLUMN = "position" + GENERATOR = "dist_gen.uniform_ms" + gc.do_next(f"string.{COLUMN}") + gc.do_propose("") + proposals = gc.get_proposals() + gc.do_set(str(proposals[f"{GENERATOR}"][0])) + gc.do_quit("") + row_gens: list[dict[str,any]] = gc.config["tables"]["string"]["row_generators"] + self.assertEqual(len(row_gens), 2) + if row_gens[0]["name"] == GENERATOR: + row_gen0 = row_gens[0] + row_gen1 = row_gens[1] + else: + row_gen0 = row_gens[1] + row_gen1 = row_gens[0] + self.assertEqual(row_gen0["name"], GENERATOR) + self.assertEqual(row_gen1["name"], "dist_gen.normal") + self.assertListEqual(row_gen0["columns_assigned"], [COLUMN]) + self.assertDictEqual(row_gen0["kwargs"], { + "mean": f'SRC_STATS["auto__string"]["mean__{COLUMN}"]', + "sd": f'SRC_STATS["auto__string"]["stddev__{COLUMN}"]', + }) + self.assertListEqual(row_gen1["columns_assigned"], ["frequency"]) + self.assertDictEqual(row_gen1["kwargs"], { + "mean": 'SRC_STATS["auto__string"]["mean__frequency"]', + "sd": 'SRC_STATS["auto__string"]["stddev__frequency"]', + }) + self.assertEqual(len(gc.config["src-stats"]), 1) + self.assertEqual(gc.config["src-stats"][0]["name"], "auto__string") + select_match = re.match(r'SELECT (.*) FROM string', gc.config["src-stats"][0]["query"]) + self.assertIsNotNone(select_match, "src_stats[0].query is not an aggregate select") + self.assertSetEqual(set(select_match.group(1).split(", ")), { + "AVG(frequency) AS mean__frequency", + "STDDEV(frequency) AS stddev__frequency", + f"AVG({COLUMN}) AS mean__{COLUMN}", + f"STDDEV({COLUMN}) AS stddev__{COLUMN}", }) From b5a051f02174c2ec741c3bfe9c5f9994487a8c2f Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 23 Apr 2025 09:09:38 +0100 Subject: [PATCH 58/85] #12 tab completion for configure_generators' next --- sqlsynthgen/interactive.py | 25 ++++++++++++++++++++++++- tests/test_interactive.py | 24 ++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 7686d55c..131e5f8d 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -623,7 +623,7 @@ def do_next(self, arg): self.print("No such (non-vocabulary, non-ignored) table name {0}", parts[0]) return gen_index = None - if 1 < len(parts): + if 1 < len(parts) and parts[1]: gen_index = self.get_generator_index(table_index, parts[1]) if gen_index is None: self.print("we cannot set the generator for column {0}", parts[1]) @@ -643,6 +643,29 @@ def do_next(self, arg): self.generator_index = next_gi self.set_prompt() + def complete_next(self, text: str, _line: str, _begidx: int, _endidx: int): + parts = text.split(".", 1) + table_name = parts[0] + if 1 < len(parts): + column_name = parts[1] + table_index = self.get_table_index(table_name) + if table_index is None: + return [] + table_entry: GeneratorCmdTableEntry = self.table_entries[table_index] + return [ + f"{table_name}.{gen.column}" + for gen in table_entry.generators + if 
gen.column.startswith(column_name) + ] + table_names = [ + entry.name + for entry in self.table_entries + if entry.name.startswith(table_name) + ] + if table_name in table_names: + table_names.append(f"{table_name}.") + return table_names + def do_previous(self, _arg): "Go to the previous generator" if self.generator_index == 0: diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 7a81c9a0..98ddaef2 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -412,3 +412,27 @@ def test_aggregate_queries_merge(self): f"AVG({COLUMN}) AS mean__{COLUMN}", f"STDDEV({COLUMN}) AS stddev__{COLUMN}", }) + + def test_next_completion(self): + """ Test tab completion for the next command. """ + metadata = MetaData() + metadata.reflect(self.engine) + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, {}) as gc: + self.assertSetEqual( + set(gc.complete_next("m", "next m", 5, 6)), + {"manufacturer", "model"}, + ) + self.assertSetEqual( + set(gc.complete_next("model", "next model", 5, 10)), + {"model", "model."}, + ) + self.assertSetEqual( + set(gc.complete_next("string.", "next string.", 5, 11)), + {"string.id", "string.model_id", "string.position", "string.frequency"}, + ) + self.assertSetEqual( + set(gc.complete_next("string.p", "next string.p", 5, 12)), + {"string.position"}, + ) + self.assertListEqual(gc.complete_next("string.q", "next string.q", 5, 12), []) + self.assertListEqual(gc.complete_next("ww", "next ww", 5, 7), []) From 8d5eded92c286dee217835768b74890fdde2ea64 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 23 Apr 2025 23:15:24 +0100 Subject: [PATCH 59/85] #2 configure-generators' compare command reports the privacy of the table it is configuring --- sqlsynthgen/interactive.py | 55 +++++++++++++++++++++++++------------- sqlsynthgen/utils.py | 24 +++++++++++++++++ tests/test_interactive.py | 36 +++++++++++++++++++++++-- 3 files changed, 94 insertions(+), 21 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 131e5f8d..2b589a23 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -9,7 +9,7 @@ from sqlalchemy import Column, MetaData, Table, text from sqlsynthgen.generators import everything_factory, Generator, PredefinedGenerator -from sqlsynthgen.utils import create_db_engine +from sqlsynthgen.utils import create_db_engine, primary_private_fks, table_is_private logger = logging.getLogger(__name__) @@ -360,6 +360,9 @@ class GeneratorCmd(DbCmd): PROPOSE_SOURCE_SAMPLE_TEXT = "Sample of actual source data: {0}..." PROPOSE_GENERATOR_SAMPLE_TEXT = "{index}. {name}: {fit} {sample} ..." 
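
As background for the complete_next hook exercised by the tests above: cmd.Cmd looks up a complete_<command> method and calls it with (text, line, begidx, endidx), offering whatever list of strings it returns as tab completions. A minimal standalone sketch, not part of the patch; DemoCmd and its table list are invented:

import cmd

class DemoCmd(cmd.Cmd):
    TABLES = ["manufacturer", "model", "string"]

    def do_next(self, arg):
        # Stand-in for GeneratorCmd.do_next.
        self.stdout.write(f"next -> {arg!r}\n")

    def complete_next(self, text, _line, _begidx, _endidx):
        # 'text' is the word being completed, e.g. "m" in "next m".
        return [name for name in self.TABLES if name.startswith(text)]

# DemoCmd().complete_next("m", "next m", 5, 6) == ["manufacturer", "model"]
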
+ PRIMARY_PRIVATE_TEXT = "Primary Private" + SECONDARY_PRIVATE_TEXT = "Secondary Private on columns {0}" + NOT_PRIVATE_TEXT = "Not private" def make_table_entry(self, table_name: str) -> TableEntry | None: tables = self.config.get("tables", {}) @@ -597,13 +600,13 @@ def do_columns(self, _arg): "Report the column names" self.columnize(self.table_metadata().columns.keys()) - def get_table_index(self, table_name: str) -> int | None: + def _get_table_index(self, table_name: str) -> int | None: for n, entry in enumerate(self.table_entries): if entry.name == table_name: return n return None - def get_generator_index(self, table_index, column_name): + def _get_generator_index(self, table_index, column_name): entry: GeneratorCmdTableEntry = self.table_entries[table_index] for n, gen in enumerate(entry.generators): if gen.column == column_name: @@ -618,13 +621,13 @@ def do_next(self, arg): """ if arg: parts = arg.split(".", 1) - table_index = self.get_table_index(parts[0]) + table_index = self._get_table_index(parts[0]) if table_index is None: self.print("No such (non-vocabulary, non-ignored) table name {0}", parts[0]) return gen_index = None if 1 < len(parts) and parts[1]: - gen_index = self.get_generator_index(table_index, parts[1]) + gen_index = self._get_generator_index(table_index, parts[1]) if gen_index is None: self.print("we cannot set the generator for column {0}", parts[1]) return @@ -648,7 +651,7 @@ def complete_next(self, text: str, _line: str, _begidx: int, _endidx: int): table_name = parts[0] if 1 < len(parts): column_name = parts[1] - table_index = self.get_table_index(table_name) + table_index = self._get_table_index(table_name) if table_index is None: return [] table_entry: GeneratorCmdTableEntry = self.table_entries[table_index] @@ -674,7 +677,7 @@ def do_previous(self, _arg): self.generator_index -= 1 self.set_prompt() - def get_generator_proposals(self) -> list[Generator]: + def _get_generator_proposals(self) -> list[Generator]: if self.generators_valid_indices != (self.table_index, self.generator_index): self.generators = None if self.generators is None: @@ -688,6 +691,19 @@ def get_generator_proposals(self) -> list[Generator]: self.generators_valid_indices = (self.table_index, self.generator_index) return self.generators + def _print_privacy(self): + table = self.table_metadata() + if table is None: + return + if table_is_private(self.config, table.name): + self.print(self.PRIMARY_PRIVATE_TEXT) + return + pfks = primary_private_fks(self.config, table) + if not pfks: + self.print(self.NOT_PRIVATE_TEXT) + return + self.print(self.SECONDARY_PRIVATE_TEXT, pfks) + def do_compare(self, arg: str): """ Compare the real data with some generators. @@ -697,12 +713,13 @@ def do_compare(self, arg: str): from generators 5, 6 and 10. You can find out which numbers correspond to which generators using the 'propose' command. """ + self._print_privacy() args = arg.split() limit = 20 comparison = { - "source": self.get_column_data(limit, to_str=str), + "source": self._get_column_data(limit, to_str=str), } - gens: list[Generator] = self.get_generator_proposals() + gens: list[Generator] = self._get_generator_proposals() table_name = self.table_name() for argument in args: if argument.isdigit(): @@ -710,10 +727,10 @@ def do_compare(self, arg: str): if 0 < n and n <= len(gens): gen = gens[n - 1] comparison[f"{n}. 
{gen.function_name()}"] = gen.generate_data(limit) - self.print_values_queried(table_name, n, gen) + self._print_values_queried(table_name, n, gen) self.print_table_by_columns(comparison) - def print_values_queried(self, table_name: str, n: int, gen: Generator): + def _print_values_queried(self, table_name: str, n: int, gen: Generator): """ Print the values queried from the database for this generator. """ @@ -729,10 +746,10 @@ def print_values_queried(self, table_name: str, n: int, gen: Generator): n, gen.function_name(), ) - self.print_select_aggregate_query(table_name, gen) - self.print_custom_queries(gen) + self._print_select_aggregate_query(table_name, gen) + self._print_custom_queries(gen) - def print_custom_queries(self, gen: Generator) -> None: + def _print_custom_queries(self, gen: Generator) -> None: """ Print all the custom queries and all the values they get in this case. """ @@ -763,7 +780,7 @@ def _get_aggregate_query(self, gens: list[Generator], table_name: str) -> str | return None return f"SELECT {', '.join(clauses)} FROM {table_name}" - def print_select_aggregate_query(self, table_name, gen: Generator) -> None: + def _print_select_aggregate_query(self, table_name, gen: Generator) -> None: """ Prints the select aggregate query and all the values it gets in this case. """ @@ -786,7 +803,7 @@ def print_select_aggregate_query(self, table_name, gen: Generator) -> None: select_q = self._get_aggregate_query([gen], table_name) self.print("{0}; providing the following values: {1}", select_q, vals) - def get_column_data(self, count: int, to_str=repr, min_length: int = 0): + def _get_column_data(self, count: int, to_str=repr, min_length: int = 0): column = str(self.get_column_name()) where = "" if 0 < min_length: @@ -814,8 +831,8 @@ def do_propose(self, arg): the column and against each other) with the 'compare' command. """ limit = 5 - gens = self.get_generator_proposals() - sample = self.get_column_data(limit) + gens = self._get_generator_proposals() + sample = self._get_column_data(limit) self.print(self.PROPOSE_SOURCE_SAMPLE_TEXT, ",".join(sample)) for index, gen in enumerate(gens): fit = gen.fit() @@ -841,7 +858,7 @@ def do_set(self, arg: str): if not arg.isdigit(): self.print("set requires a single integer argument; 'set 3' sets the third generator that 'propose' lists.") return - gens = self.get_generator_proposals() + gens = self._get_generator_proposals() index = int(arg) if index < 1: self.print("set's argument must be at least 1") diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index 0969fc27..cd7bde3e 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -250,6 +250,30 @@ def get_related_table_names(table: Table) -> set[str]: } +def table_is_private(config: Mapping, table_name: str) -> bool: + """ + Return True if the table with name table_name is a primary private table + according to config. + """ + ts = config.get("tables", {}) + t = ts.get(table_name, {}) + return t.get("primary_private", False) + + +def primary_private_fks(config: Mapping, table: Table) -> list[str]: + """ + Returns the list of columns in the table that refer to primary private tables. + + A table that is not primary private but has a non-empty list of + primary_private_fks is secondary private. + """ + return [ + str(fk.referred_table.name) + for fk in table.foreign_key_constraints + if table_is_private(config, str(fk.referred_table.name)) + ] + + def get_vocabulary_table_names(config: Mapping) -> set[str]: """ Extract the table names with a vocabulary_table: true property. 
diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 98ddaef2..39124f5f 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -218,8 +218,8 @@ def get_proposals(self) -> dict[str, tuple[int, str, str, list[str]]]: } -class ConfigureTablesTests(RequiresDBTestCase): - """Testing configure-tables.""" +class ConfigureGeneratorsTests(RequiresDBTestCase): + """ Testing configure-generators. """ dump_file_path = "instrument.sql" database_name = "instrument" schema_name = "public" @@ -436,3 +436,35 @@ def test_next_completion(self): ) self.assertListEqual(gc.complete_next("string.q", "next string.q", 5, 12), []) self.assertListEqual(gc.complete_next("ww", "next ww", 5, 7), []) + + def test_compare_reports_privacy(self): + """ + Test that compare reports whether the current table is primary private, + secondary private or not private. + """ + metadata = MetaData() + metadata.reflect(self.engine) + config = { + "tables": { + "model": { + "primary_private": True, + } + }, + } + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, copy.deepcopy(config)) as gc: + gc.do_next("manufacturer") + gc.reset() + gc.do_compare("") + (text, args, kwargs) = gc.messages[0] + self.assertEqual(text, gc.NOT_PRIVATE_TEXT) + gc.do_next("model") + gc.reset() + gc.do_compare("") + (text, args, kwargs) = gc.messages[0] + self.assertEqual(text, gc.PRIMARY_PRIVATE_TEXT) + gc.do_next("string") + gc.reset() + gc.do_compare("") + (text, args, kwargs) = gc.messages[0] + self.assertEqual(text, gc.SECONDARY_PRIVATE_TEXT) + self.assertSequenceEqual(args, [["model"]]) From fbdea224dd8194dba11c948152c7868053b50949 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 24 Apr 2025 22:14:49 +0100 Subject: [PATCH 60/85] testing that configure-generators does not damage other configuration --- tests/test_interactive.py | 38 +++++++++++++++++++++++++++++++++++--- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 39124f5f..ddd1b1e7 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -455,16 +455,48 @@ def test_compare_reports_privacy(self): gc.do_next("manufacturer") gc.reset() gc.do_compare("") - (text, args, kwargs) = gc.messages[0] + (text, args, _kwargs) = gc.messages[0] self.assertEqual(text, gc.NOT_PRIVATE_TEXT) gc.do_next("model") gc.reset() gc.do_compare("") - (text, args, kwargs) = gc.messages[0] + (text, args, _kwargs) = gc.messages[0] self.assertEqual(text, gc.PRIMARY_PRIVATE_TEXT) gc.do_next("string") gc.reset() gc.do_compare("") - (text, args, kwargs) = gc.messages[0] + (text, args, _kwargs) = gc.messages[0] self.assertEqual(text, gc.SECONDARY_PRIVATE_TEXT) self.assertSequenceEqual(args, [["model"]]) + + def test_existing_configuration_remains(self): + """ + Test setting a generator does not remove other information. 
+ """ + metadata = MetaData() + metadata.reflect(self.engine) + config = { + "tables": { + "string": { + "primary_private": True, + } + }, + "src-stats": [{ + "name": "kraken", + "query": 'SELECT MAX(frequency) AS max_frequency FROM string', + }] + } + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, copy.deepcopy(config)) as gc: + COLUMN = "position" + GENERATOR = "dist_gen.uniform_ms" + gc.do_next(f"string.{COLUMN}") + gc.do_propose("") + proposals = gc.get_proposals() + gc.do_set(str(proposals[f"{GENERATOR}"][0])) + gc.do_quit("") + src_stats = { + stat["name"]: stat["query"] + for stat in gc.config["src-stats"] + } + self.assertEqual(src_stats["kraken"], config["src-stats"][0]["query"]) + self.assertTrue(gc.config["tables"]["string"]["primary_private"]) From ab025428632c71b315bbc78e441d75890ec33996 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Sat, 26 Apr 2025 22:10:20 +0100 Subject: [PATCH 61/85] Updated dependencies to work with Python 3.13 --- poetry.lock | 573 +++++++++++++++++++++++-------------------------- pyproject.toml | 2 +- 2 files changed, 264 insertions(+), 311 deletions(-) diff --git a/poetry.lock b/poetry.lock index 632700a1..d345b25b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -204,13 +204,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2025.1.31" +version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, - {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, ] [[package]] @@ -352,22 +352,18 @@ files = [ [[package]] name = "dataclasses-json" -version = "0.5.9" -description = "Easily serialize dataclasses to and from JSON" +version = "0.6.7" +description = "Easily serialize dataclasses to and from JSON." 
optional = false -python-versions = ">=3.6" +python-versions = "<4.0,>=3.7" files = [ - {file = "dataclasses-json-0.5.9.tar.gz", hash = "sha256:e9ac87b73edc0141aafbce02b44e93553c3123ad574958f0fe52a534b6707e8e"}, - {file = "dataclasses_json-0.5.9-py3-none-any.whl", hash = "sha256:1280542631df1c375b7bc92e5b86d39e06c44760d7e3571a537b3b8acabf2f0c"}, + {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, + {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, ] [package.dependencies] -marshmallow = ">=3.3.0,<4.0.0" -marshmallow-enum = ">=1.5.1,<2.0.0" -typing-inspect = ">=0.4.0" - -[package.extras] -dev = ["flake8", "hypothesis", "ipython", "mypy (>=0.710)", "portray", "pytest (>=7.2.0)", "setuptools", "simplejson", "twine", "types-dataclasses", "wheel"] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" [[package]] name = "deprecated" @@ -388,13 +384,13 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] [[package]] name = "dill" -version = "0.3.9" +version = "0.4.0" description = "serialize all of Python" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, - {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, + {file = "dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049"}, + {file = "dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0"}, ] [package.extras] @@ -457,109 +453,81 @@ test = ["coverage", "pytest (>=7,<8.1)", "pytest-cov", "pytest-mock (>=3)"] [[package]] name = "greenlet" -version = "3.1.1" +version = "3.2.1" description = "Lightweight in-process concurrent programming" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, - {file = 
"greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, - {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, - {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, - {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, - {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, - {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, - {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, - {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, - {file = 
"greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, - {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, - {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, - {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, + {file = "greenlet-3.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:777c1281aa7c786738683e302db0f55eb4b0077c20f1dc53db8852ffaea0a6b0"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3059c6f286b53ea4711745146ffe5a5c5ff801f62f6c56949446e0f6461f8157"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1a40a17e2c7348f5eee5d8e1b4fa6a937f0587eba89411885a36a8e1fc29bd2"}, 
+ {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5193135b3a8d0017cb438de0d49e92bf2f6c1c770331d24aa7500866f4db4017"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639a94d001fe874675b553f28a9d44faed90f9864dc57ba0afef3f8d76a18b04"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fe303381e7e909e42fb23e191fc69659910909fdcd056b92f6473f80ef18543"}, + {file = "greenlet-3.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:72c9b668454e816b5ece25daac1a42c94d1c116d5401399a11b77ce8d883110c"}, + {file = "greenlet-3.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6079ae990bbf944cf66bea64a09dcb56085815630955109ffa98984810d71565"}, + {file = "greenlet-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:e63cd2035f49376a23611fbb1643f78f8246e9d4dfd607534ec81b175ce582c2"}, + {file = "greenlet-3.2.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:aa30066fd6862e1153eaae9b51b449a6356dcdb505169647f69e6ce315b9468b"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b0f3a0a67786facf3b907a25db80efe74310f9d63cc30869e49c79ee3fcef7e"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64a4d0052de53ab3ad83ba86de5ada6aeea8f099b4e6c9ccce70fb29bc02c6a2"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852ef432919830022f71a040ff7ba3f25ceb9fe8f3ab784befd747856ee58530"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4818116e75a0dd52cdcf40ca4b419e8ce5cb6669630cb4f13a6c384307c9543f"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9afa05fe6557bce1642d8131f87ae9462e2a8e8c46f7ed7929360616088a3975"}, + {file = "greenlet-3.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5c12f0d17a88664757e81a6e3fc7c2452568cf460a2f8fb44f90536b2614000b"}, + {file = "greenlet-3.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dbb4e1aa2000852937dd8f4357fb73e3911da426df8ca9b8df5db231922da474"}, + {file = "greenlet-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:cb5ee928ce5fedf9a4b0ccdc547f7887136c4af6109d8f2fe8e00f90c0db47f5"}, + {file = "greenlet-3.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:0ba2811509a30e5f943be048895a983a8daf0b9aa0ac0ead526dfb5d987d80ea"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4245246e72352b150a1588d43ddc8ab5e306bef924c26571aafafa5d1aaae4e8"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7abc0545d8e880779f0c7ce665a1afc3f72f0ca0d5815e2b006cafc4c1cc5840"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dcc6d604a6575c6225ac0da39df9335cc0c6ac50725063fa90f104f3dbdb2c9"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2273586879affca2d1f414709bb1f61f0770adcabf9eda8ef48fd90b36f15d12"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ff38c869ed30fff07f1452d9a204ece1ec6d3c0870e0ba6e478ce7c1515acf22"}, + {file = "greenlet-3.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e934591a7a4084fa10ee5ef50eb9d2ac8c4075d5c9cf91128116b5dca49d43b1"}, + {file = 
"greenlet-3.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:063bcf7f8ee28eb91e7f7a8148c65a43b73fbdc0064ab693e024b5a940070145"}, + {file = "greenlet-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7132e024ebeeeabbe661cf8878aac5d2e643975c4feae833142592ec2f03263d"}, + {file = "greenlet-3.2.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:e1967882f0c42eaf42282a87579685c8673c51153b845fde1ee81be720ae27ac"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e77ae69032a95640a5fe8c857ec7bee569a0997e809570f4c92048691ce4b437"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3227c6ec1149d4520bc99edac3b9bc8358d0034825f3ca7572165cb502d8f29a"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ddda0197c5b46eedb5628d33dad034c455ae77708c7bf192686e760e26d6a0c"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de62b542e5dcf0b6116c310dec17b82bb06ef2ceb696156ff7bf74a7a498d982"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c07a0c01010df42f1f058b3973decc69c4d82e036a951c3deaf89ab114054c07"}, + {file = "greenlet-3.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2530bfb0abcd451ea81068e6d0a1aac6dabf3f4c23c8bd8e2a8f579c2dd60d95"}, + {file = "greenlet-3.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1c472adfca310f849903295c351d297559462067f618944ce2650a1878b84123"}, + {file = "greenlet-3.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:24a496479bc8bd01c39aa6516a43c717b4cee7196573c47b1f8e1011f7c12495"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:175d583f7d5ee57845591fc30d852b75b144eb44b05f38b67966ed6df05c8526"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ecc9d33ca9428e4536ea53e79d781792cee114d2fa2695b173092bdbd8cd6d5"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f56382ac4df3860ebed8ed838f268f03ddf4e459b954415534130062b16bc32"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc45a7189c91c0f89aaf9d69da428ce8301b0fd66c914a499199cfb0c28420fc"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51a2f49da08cff79ee42eb22f1658a2aed60c72792f0a0a95f5f0ca6d101b1fb"}, + {file = "greenlet-3.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:0c68bbc639359493420282d2f34fa114e992a8724481d700da0b10d10a7611b8"}, + {file = "greenlet-3.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:e775176b5c203a1fa4be19f91da00fd3bff536868b77b237da3f4daa5971ae5d"}, + {file = "greenlet-3.2.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d6668caf15f181c1b82fb6406f3911696975cc4c37d782e19cb7ba499e556189"}, + {file = "greenlet-3.2.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:17964c246d4f6e1327edd95e2008988a8995ae3a7732be2f9fc1efed1f1cdf8c"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04b4ec7f65f0e4a1500ac475c9343f6cc022b2363ebfb6e94f416085e40dea15"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b38d53cf268da963869aa25a6e4cc84c1c69afc1ae3391738b2603d110749d01"}, + {file = 
"greenlet-3.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a7490f74e8aabc5f29256765a99577ffde979920a2db1f3676d265a3adba41"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4339b202ac20a89ccd5bde0663b4d00dc62dd25cb3fb14f7f3034dec1b0d9ece"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a750f1046994b9e038b45ae237d68153c29a3a783075211fb1414a180c8324b"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:374ffebaa5fbd10919cd599e5cf8ee18bae70c11f9d61e73db79826c8c93d6f9"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b89e5d44f55372efc6072f59ced5ed1efb7b44213dab5ad7e0caba0232c6545"}, + {file = "greenlet-3.2.1-cp39-cp39-win32.whl", hash = "sha256:b7503d6b8bbdac6bbacf5a8c094f18eab7553481a1830975799042f26c9e101b"}, + {file = "greenlet-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:e98328b8b8f160925d6b1c5b1879d8e64f6bd8cf11472b7127d579da575b77d9"}, + {file = "greenlet-3.2.1.tar.gz", hash = "sha256:9f4dd4b4946b14bb3bf038f81e1d2e535b7d94f1b2a59fdba1293cd9c1a0a4d7"}, ] [package.extras] docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] -[[package]] -name = "htmlmin" -version = "0.1.12" -description = "An HTML Minifier" -optional = false -python-versions = "*" -files = [ - {file = "htmlmin-0.1.12.tar.gz", hash = "sha256:50c1ef4630374a5d723900096a961cff426dff46b48f34d194a81bbe14eca178"}, -] - [[package]] name = "identify" -version = "2.6.9" +version = "2.6.10" description = "File identification library for Python" optional = false python-versions = ">=3.9" files = [ - {file = "identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150"}, - {file = "identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf"}, + {file = "identify-2.6.10-py2.py3-none-any.whl", hash = "sha256:5f34248f54136beed1a7ba6a6b5c4b6cf21ff495aac7c359e1ef831ae3b8ab25"}, + {file = "identify-2.6.10.tar.gz", hash = "sha256:45e92fd704f3da71cc3880036633f48b4b7265fd4de2b57627cb157216eb7eb8"}, ] [package.extras] @@ -623,26 +591,24 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "json-schema-for-humans" -version = "0.44.8" +version = "1.3.4" description = "Generate static HTML documentation from JSON schemas" optional = false -python-versions = ">=3.7,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "json_schema_for_humans-0.44.8-py3-none-any.whl", hash = "sha256:4d440dae04d86776c3f93588cba2e10cdd7be22face9a519b01d450d64fe8cf1"}, - {file = "json_schema_for_humans-0.44.8.tar.gz", hash = "sha256:78634f243be53f11060c9dcb970d72993603b619048ac0c6fce9a9fd101944b1"}, + {file = "json_schema_for_humans-1.3.4-py3-none-any.whl", hash = "sha256:565232f691869986bbf4106deb0a57f45ea30310daad4875dcc32115e6ee8ddd"}, + {file = "json_schema_for_humans-1.3.4.tar.gz", hash = "sha256:4f0614304cb0bdc2324730c1e6987b3157f3b2474d6cce5964e644b3707ab8ea"}, ] [package.dependencies] click = ">=8.0.1,<9.0.0" -dataclasses-json = ">=0.5.6,<0.6.0" -htmlmin = ">=0.1.12,<0.2.0" +dataclasses-json = ">=0.6.7,<0.7.0" Jinja2 = ">3" -markdown2 = ">=2.4.1,<3.0.0" -MarkupSafe = ">=2.0,<3.0" -Pygments = ">=2.10.0,<3.0.0" +markdown2 = ">=2.5.0,<3.0.0" +Pygments = ">=2.18.0,<3.0.0" pytz = "*" -PyYAML = ">=5.4.1,<7" -requests = ">=2.31.0,<3.0.0" +PyYAML = ">=6.0.2,<7.0.0" +requests = ">=2.32.3,<3.0.0" [[package]] name = "jsonschema" @@ -667,13 +633,13 @@ format-nongpl = 
["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2024.10.1" +version = "2025.4.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" files = [ - {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, - {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, + {file = "jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af"}, + {file = "jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608"}, ] [package.dependencies] @@ -698,71 +664,72 @@ wavedrom = ["wavedrom"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = 
"MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", 
hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file 
= "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] @@ -784,20 +751,6 @@ dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"] tests = ["pytest", "simplejson"] -[[package]] -name = "marshmallow-enum" -version = "1.5.1" -description = "Enum field for Marshmallow" -optional = false -python-versions = "*" -files = [ - {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, - {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, -] - -[package.dependencies] -marshmallow = ">=2.0.0" - [[package]] name = "mccabe" version = "0.7.0" @@ -879,13 +832,13 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "1.0.0" +version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] [[package]] @@ -901,66 +854,66 @@ files = [ [[package]] name = "numpy" -version = "2.2.4" +version = "2.2.5" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" files = [ - {file = "numpy-2.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8146f3550d627252269ac42ae660281d673eb6f8b32f113538e0cc2a9aed42b9"}, - {file = "numpy-2.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e642d86b8f956098b564a45e6f6ce68a22c2c97a04f5acd3f221f57b8cb850ae"}, - {file = "numpy-2.2.4-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:a84eda42bd12edc36eb5b53bbcc9b406820d3353f1994b6cfe453a33ff101775"}, - {file = "numpy-2.2.4-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:4ba5054787e89c59c593a4169830ab362ac2bee8a969249dc56e5d7d20ff8df9"}, - {file = "numpy-2.2.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7716e4a9b7af82c06a2543c53ca476fa0b57e4d760481273e09da04b74ee6ee2"}, - {file = "numpy-2.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf8c1d66f432ce577d0197dceaac2ac00c0759f573f28516246351c58a85020"}, - {file = "numpy-2.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:218f061d2faa73621fa23d6359442b0fc658d5b9a70801373625d958259eaca3"}, - {file = "numpy-2.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:df2f57871a96bbc1b69733cd4c51dc33bea66146b8c63cacbfed73eec0883017"}, - {file = "numpy-2.2.4-cp310-cp310-win32.whl", hash = "sha256:a0258ad1f44f138b791327961caedffbf9612bfa504ab9597157806faa95194a"}, - {file = "numpy-2.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:0d54974f9cf14acf49c60f0f7f4084b6579d24d439453d5fc5805d46a165b542"}, - {file = "numpy-2.2.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9e0a277bb2eb5d8a7407e14688b85fd8ad628ee4e0c7930415687b6564207a4"}, - {file = "numpy-2.2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eeea959168ea555e556b8188da5fa7831e21d91ce031e95ce23747b7609f8a4"}, - {file = "numpy-2.2.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bd3ad3b0a40e713fc68f99ecfd07124195333f1e689387c180813f0e94309d6f"}, - {file = "numpy-2.2.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cf28633d64294969c019c6df4ff37f5698e8326db68cc2b66576a51fad634880"}, - {file = "numpy-2.2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fa8fa7697ad1646b5c93de1719965844e004fcad23c91228aca1cf0800044a1"}, - {file = "numpy-2.2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4162988a360a29af158aeb4a2f4f09ffed6a969c9776f8f3bdee9b06a8ab7e5"}, - {file = "numpy-2.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:892c10d6a73e0f14935c31229e03325a7b3093fafd6ce0af704be7f894d95687"}, - {file = "numpy-2.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db1f1c22173ac1c58db249ae48aa7ead29f534b9a948bc56828337aa84a32ed6"}, - {file = "numpy-2.2.4-cp311-cp311-win32.whl", hash = 
"sha256:ea2bb7e2ae9e37d96835b3576a4fa4b3a97592fbea8ef7c3587078b0068b8f09"}, - {file = "numpy-2.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:f7de08cbe5551911886d1ab60de58448c6df0f67d9feb7d1fb21e9875ef95e91"}, - {file = "numpy-2.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a7b9084668aa0f64e64bd00d27ba5146ef1c3a8835f3bd912e7a9e01326804c4"}, - {file = "numpy-2.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbe512c511956b893d2dacd007d955a3f03d555ae05cfa3ff1c1ff6df8851854"}, - {file = "numpy-2.2.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bb649f8b207ab07caebba230d851b579a3c8711a851d29efe15008e31bb4de24"}, - {file = "numpy-2.2.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:f34dc300df798742b3d06515aa2a0aee20941c13579d7a2f2e10af01ae4901ee"}, - {file = "numpy-2.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3f7ac96b16955634e223b579a3e5798df59007ca43e8d451a0e6a50f6bfdfba"}, - {file = "numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f92084defa704deadd4e0a5ab1dc52d8ac9e8a8ef617f3fbb853e79b0ea3592"}, - {file = "numpy-2.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4e84a6283b36632e2a5b56e121961f6542ab886bc9e12f8f9818b3c266bfbb"}, - {file = "numpy-2.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:11c43995255eb4127115956495f43e9343736edb7fcdb0d973defd9de14cd84f"}, - {file = "numpy-2.2.4-cp312-cp312-win32.whl", hash = "sha256:65ef3468b53269eb5fdb3a5c09508c032b793da03251d5f8722b1194f1790c00"}, - {file = "numpy-2.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:2aad3c17ed2ff455b8eaafe06bcdae0062a1db77cb99f4b9cbb5f4ecb13c5146"}, - {file = "numpy-2.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cf4e5c6a278d620dee9ddeb487dc6a860f9b199eadeecc567f777daace1e9e7"}, - {file = "numpy-2.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1974afec0b479e50438fc3648974268f972e2d908ddb6d7fb634598cdb8260a0"}, - {file = "numpy-2.2.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:79bd5f0a02aa16808fcbc79a9a376a147cc1045f7dfe44c6e7d53fa8b8a79392"}, - {file = "numpy-2.2.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:3387dd7232804b341165cedcb90694565a6015433ee076c6754775e85d86f1fc"}, - {file = "numpy-2.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f527d8fdb0286fd2fd97a2a96c6be17ba4232da346931d967a0630050dfd298"}, - {file = "numpy-2.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce43e386c16898b91e162e5baaad90c4b06f9dcbe36282490032cec98dc8ae7"}, - {file = "numpy-2.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31504f970f563d99f71a3512d0c01a645b692b12a63630d6aafa0939e52361e6"}, - {file = "numpy-2.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:81413336ef121a6ba746892fad881a83351ee3e1e4011f52e97fba79233611fd"}, - {file = "numpy-2.2.4-cp313-cp313-win32.whl", hash = "sha256:f486038e44caa08dbd97275a9a35a283a8f1d2f0ee60ac260a1790e76660833c"}, - {file = "numpy-2.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:207a2b8441cc8b6a2a78c9ddc64d00d20c303d79fba08c577752f080c4007ee3"}, - {file = "numpy-2.2.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8120575cb4882318c791f839a4fd66161a6fa46f3f0a5e613071aae35b5dd8f8"}, - {file = "numpy-2.2.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a761ba0fa886a7bb33c6c8f6f20213735cb19642c580a931c625ee377ee8bd39"}, - {file = "numpy-2.2.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = 
"sha256:ac0280f1ba4a4bfff363a99a6aceed4f8e123f8a9b234c89140f5e894e452ecd"}, - {file = "numpy-2.2.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:879cf3a9a2b53a4672a168c21375166171bc3932b7e21f622201811c43cdd3b0"}, - {file = "numpy-2.2.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05d4198c1bacc9124018109c5fba2f3201dbe7ab6e92ff100494f236209c960"}, - {file = "numpy-2.2.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f085ce2e813a50dfd0e01fbfc0c12bbe5d2063d99f8b29da30e544fb6483b8"}, - {file = "numpy-2.2.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:92bda934a791c01d6d9d8e038363c50918ef7c40601552a58ac84c9613a665bc"}, - {file = "numpy-2.2.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ee4d528022f4c5ff67332469e10efe06a267e32f4067dc76bb7e2cddf3cd25ff"}, - {file = "numpy-2.2.4-cp313-cp313t-win32.whl", hash = "sha256:05c076d531e9998e7e694c36e8b349969c56eadd2cdcd07242958489d79a7286"}, - {file = "numpy-2.2.4-cp313-cp313t-win_amd64.whl", hash = "sha256:188dcbca89834cc2e14eb2f106c96d6d46f200fe0200310fc29089657379c58d"}, - {file = "numpy-2.2.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7051ee569db5fbac144335e0f3b9c2337e0c8d5c9fee015f259a5bd70772b7e8"}, - {file = "numpy-2.2.4-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ab2939cd5bec30a7430cbdb2287b63151b77cf9624de0532d629c9a1c59b1d5c"}, - {file = "numpy-2.2.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f35b19894a9e08639fd60a1ec1978cb7f5f7f1eace62f38dd36be8aecdef4d"}, - {file = "numpy-2.2.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b4adfbbc64014976d2f91084915ca4e626fbf2057fb81af209c1a6d776d23e3d"}, - {file = "numpy-2.2.4.tar.gz", hash = "sha256:9ba03692a45d3eef66559efe1d1096c4b9b75c0986b5dff5530c378fb8331d4f"}, + {file = "numpy-2.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f4a922da1729f4c40932b2af4fe84909c7a6e167e6e99f71838ce3a29f3fe26"}, + {file = "numpy-2.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6f91524d31b34f4a5fee24f5bc16dcd1491b668798b6d85585d836c1e633a6a"}, + {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:19f4718c9012e3baea91a7dba661dcab2451cda2550678dc30d53acb91a7290f"}, + {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:eb7fd5b184e5d277afa9ec0ad5e4eb562ecff541e7f60e69ee69c8d59e9aeaba"}, + {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6413d48a9be53e183eb06495d8e3b006ef8f87c324af68241bbe7a39e8ff54c3"}, + {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7451f92eddf8503c9b8aa4fe6aa7e87fd51a29c2cfc5f7dbd72efde6c65acf57"}, + {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0bcb1d057b7571334139129b7f941588f69ce7c4ed15a9d6162b2ea54ded700c"}, + {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:36ab5b23915887543441efd0417e6a3baa08634308894316f446027611b53bf1"}, + {file = "numpy-2.2.5-cp310-cp310-win32.whl", hash = "sha256:422cc684f17bc963da5f59a31530b3936f57c95a29743056ef7a7903a5dbdf88"}, + {file = "numpy-2.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:e4f0b035d9d0ed519c813ee23e0a733db81ec37d2e9503afbb6e54ccfdee0fa7"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c42365005c7a6c42436a54d28c43fe0e01ca11eb2ac3cefe796c25a5f98e5e9b"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:498815b96f67dc347e03b719ef49c772589fb74b8ee9ea2c37feae915ad6ebda"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6411f744f7f20081b1b4e7112e0f4c9c5b08f94b9f086e6f0adf3645f85d3a4d"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9de6832228f617c9ef45d948ec1cd8949c482238d68b2477e6f642c33a7b0a54"}, + {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:369e0d4647c17c9363244f3468f2227d557a74b6781cb62ce57cf3ef5cc7c610"}, + {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:262d23f383170f99cd9191a7c85b9a50970fe9069b2f8ab5d786eca8a675d60b"}, + {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa70fdbdc3b169d69e8c59e65c07a1c9351ceb438e627f0fdcd471015cd956be"}, + {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37e32e985f03c06206582a7323ef926b4e78bdaa6915095ef08070471865b906"}, + {file = "numpy-2.2.5-cp311-cp311-win32.whl", hash = "sha256:f5045039100ed58fa817a6227a356240ea1b9a1bc141018864c306c1a16d4175"}, + {file = "numpy-2.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:b13f04968b46ad705f7c8a80122a42ae8f620536ea38cf4bdd374302926424dd"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ee461a4eaab4f165b68780a6a1af95fb23a29932be7569b9fab666c407969051"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec31367fd6a255dc8de4772bd1658c3e926d8e860a0b6e922b615e532d320ddc"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:47834cde750d3c9f4e52c6ca28a7361859fcaf52695c7dc3cc1a720b8922683e"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:2c1a1c6ccce4022383583a6ded7bbcda22fc635eb4eb1e0a053336425ed36dfa"}, + {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d75f338f5f79ee23548b03d801d28a505198297534f62416391857ea0479571"}, + {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a801fef99668f309b88640e28d261991bfad9617c27beda4a3aec4f217ea073"}, + {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:abe38cd8381245a7f49967a6010e77dbf3680bd3627c0fe4362dd693b404c7f8"}, + {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a0ac90e46fdb5649ab6369d1ab6104bfe5854ab19b645bf5cda0127a13034ae"}, + {file = "numpy-2.2.5-cp312-cp312-win32.whl", hash = "sha256:0cd48122a6b7eab8f06404805b1bd5856200e3ed6f8a1b9a194f9d9054631beb"}, + {file = "numpy-2.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:ced69262a8278547e63409b2653b372bf4baff0870c57efa76c5703fd6543282"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059b51b658f4414fff78c6d7b1b4e18283ab5fa56d270ff212d5ba0c561846f4"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47f9ed103af0bc63182609044b0490747e03bd20a67e391192dde119bf43d52f"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:261a1ef047751bb02f29dfe337230b5882b54521ca121fc7f62668133cb119c9"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4520caa3807c1ceb005d125a75e715567806fed67e315cea619d5ec6e75a4191"}, + {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d14b17b9be5f9c9301f43d2e2a4886a33b53f4e6fdf9ca2f4cc60aeeee76372"}, + {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2ba321813a00e508d5421104464510cc962a6f791aa2fca1c97b1e65027da80d"}, + {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4cbdef3ddf777423060c6f81b5694bad2dc9675f110c4b2a60dc0181543fac7"}, + {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54088a5a147ab71a8e7fdfd8c3601972751ded0739c6b696ad9cb0343e21ab73"}, + {file = "numpy-2.2.5-cp313-cp313-win32.whl", hash = "sha256:c8b82a55ef86a2d8e81b63da85e55f5537d2157165be1cb2ce7cfa57b6aef38b"}, + {file = "numpy-2.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:d8882a829fd779f0f43998e931c466802a77ca1ee0fe25a3abe50278616b1471"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e8b025c351b9f0e8b5436cf28a07fa4ac0204d67b38f01433ac7f9b870fa38c6"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dfa94b6a4374e7851bbb6f35e6ded2120b752b063e6acdd3157e4d2bb922eba"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:97c8425d4e26437e65e1d189d22dff4a079b747ff9c2788057bfb8114ce1e133"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:352d330048c055ea6db701130abc48a21bec690a8d38f8284e00fab256dc1376"}, + {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b4c0773b6ada798f51f0f8e30c054d32304ccc6e9c5d93d46cb26f3d385ab19"}, + {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f09e00d4dccd76b179c0f18a44f041e5332fd0e022886ba1c0bbf3ea4a18d0"}, + {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02f226baeefa68f7d579e213d0f3493496397d8f1cff5e2b222af274c86a552a"}, + {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c26843fd58f65da9491165072da2cccc372530681de481ef670dcc8e27cfb066"}, + {file = "numpy-2.2.5-cp313-cp313t-win32.whl", hash = "sha256:1a161c2c79ab30fe4501d5a2bbfe8b162490757cf90b7f05be8b80bc02f7bb8e"}, + {file = "numpy-2.2.5-cp313-cp313t-win_amd64.whl", hash = "sha256:d403c84991b5ad291d3809bace5e85f4bbf44a04bdc9a88ed2bb1807b3360bb8"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b4ea7e1cff6784e58fe281ce7e7f05036b3e1c89c6f922a6bfbc0a7e8768adbe"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d7543263084a85fbc09c704b515395398d31d6395518446237eac219eab9e55e"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0255732338c4fdd00996c0421884ea8a3651eea555c3a56b84892b66f696eb70"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d2e3bdadaba0e040d1e7ab39db73e0afe2c74ae277f5614dad53eadbecbbb169"}, + {file = "numpy-2.2.5.tar.gz", hash = "sha256:a9c0d994680cd991b1cb772e8b297340085466a6fe964bc9d4e80f5e2f43c291"}, ] [[package]] @@ -990,13 +943,13 @@ scikit-learn = ["numpy", "randomgen (>=2.0.0)", "scikit-learn"] [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = 
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] @@ -1265,61 +1218,61 @@ files = [ [[package]] name = "pydantic" -version = "1.10.21" +version = "1.10.22" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:245e486e0fec53ec2366df9cf1cba36e0bbf066af7cd9c974bbbd9ba10e1e586"}, - {file = "pydantic-1.10.21-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c54f8d4c151c1de784c5b93dfbb872067e3414619e10e21e695f7bb84d1d1fd"}, - {file = "pydantic-1.10.21-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b64708009cfabd9c2211295144ff455ec7ceb4c4fb45a07a804309598f36187"}, - {file = "pydantic-1.10.21-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a148410fa0e971ba333358d11a6dea7b48e063de127c2b09ece9d1c1137dde4"}, - {file = "pydantic-1.10.21-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:36ceadef055af06e7756eb4b871cdc9e5a27bdc06a45c820cd94b443de019bbf"}, - {file = "pydantic-1.10.21-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0501e1d12df6ab1211b8cad52d2f7b2cd81f8e8e776d39aa5e71e2998d0379f"}, - {file = "pydantic-1.10.21-cp310-cp310-win_amd64.whl", hash = "sha256:c261127c275d7bce50b26b26c7d8427dcb5c4803e840e913f8d9df3f99dca55f"}, - {file = "pydantic-1.10.21-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8b6350b68566bb6b164fb06a3772e878887f3c857c46c0c534788081cb48adf4"}, - {file = "pydantic-1.10.21-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:935b19fdcde236f4fbf691959fa5c3e2b6951fff132964e869e57c70f2ad1ba3"}, - {file = "pydantic-1.10.21-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b6a04efdcd25486b27f24c1648d5adc1633ad8b4506d0e96e5367f075ed2e0b"}, - {file = "pydantic-1.10.21-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1ba253eb5af8d89864073e6ce8e6c8dec5f49920cff61f38f5c3383e38b1c9f"}, - {file = "pydantic-1.10.21-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:57f0101e6c97b411f287a0b7cf5ebc4e5d3b18254bf926f45a11615d29475793"}, - {file = "pydantic-1.10.21-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90e85834f0370d737c77a386ce505c21b06bfe7086c1c568b70e15a568d9670d"}, - {file = "pydantic-1.10.21-cp311-cp311-win_amd64.whl", hash = "sha256:6a497bc66b3374b7d105763d1d3de76d949287bf28969bff4656206ab8a53aa9"}, - {file = "pydantic-1.10.21-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2ed4a5f13cf160d64aa331ab9017af81f3481cd9fd0e49f1d707b57fe1b9f3ae"}, - {file = "pydantic-1.10.21-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b7693bb6ed3fbe250e222f9415abb73111bb09b73ab90d2d4d53f6390e0ccc1"}, - {file = "pydantic-1.10.21-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185d5f1dff1fead51766da9b2de4f3dc3b8fca39e59383c273f34a6ae254e3e2"}, - {file = "pydantic-1.10.21-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38e6d35cf7cd1727822c79e324fa0677e1a08c88a34f56695101f5ad4d5e20e5"}, - {file = "pydantic-1.10.21-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1d7c332685eafacb64a1a7645b409a166eb7537f23142d26895746f628a3149b"}, - {file = "pydantic-1.10.21-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c9b782db6f993a36092480eeaab8ba0609f786041b01f39c7c52252bda6d85f"}, - {file = 
"pydantic-1.10.21-cp312-cp312-win_amd64.whl", hash = "sha256:7ce64d23d4e71d9698492479505674c5c5b92cda02b07c91dfc13633b2eef805"}, - {file = "pydantic-1.10.21-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0067935d35044950be781933ab91b9a708eaff124bf860fa2f70aeb1c4be7212"}, - {file = "pydantic-1.10.21-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5e8148c2ce4894ce7e5a4925d9d3fdce429fb0e821b5a8783573f3611933a251"}, - {file = "pydantic-1.10.21-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4973232c98b9b44c78b1233693e5e1938add5af18042f031737e1214455f9b8"}, - {file = "pydantic-1.10.21-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:662bf5ce3c9b1cef32a32a2f4debe00d2f4839fefbebe1d6956e681122a9c839"}, - {file = "pydantic-1.10.21-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:98737c3ab5a2f8a85f2326eebcd214510f898881a290a7939a45ec294743c875"}, - {file = "pydantic-1.10.21-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0bb58bbe65a43483d49f66b6c8474424d551a3fbe8a7796c42da314bac712738"}, - {file = "pydantic-1.10.21-cp313-cp313-win_amd64.whl", hash = "sha256:e622314542fb48542c09c7bd1ac51d71c5632dd3c92dc82ede6da233f55f4848"}, - {file = "pydantic-1.10.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d356aa5b18ef5a24d8081f5c5beb67c0a2a6ff2a953ee38d65a2aa96526b274f"}, - {file = "pydantic-1.10.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08caa8c0468172d27c669abfe9e7d96a8b1655ec0833753e117061febaaadef5"}, - {file = "pydantic-1.10.21-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c677aa39ec737fec932feb68e4a2abe142682f2885558402602cd9746a1c92e8"}, - {file = "pydantic-1.10.21-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:79577cc045d3442c4e845df53df9f9202546e2ba54954c057d253fc17cd16cb1"}, - {file = "pydantic-1.10.21-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:b6b73ab347284719f818acb14f7cd80696c6fdf1bd34feee1955d7a72d2e64ce"}, - {file = "pydantic-1.10.21-cp37-cp37m-win_amd64.whl", hash = "sha256:46cffa24891b06269e12f7e1ec50b73f0c9ab4ce71c2caa4ccf1fb36845e1ff7"}, - {file = "pydantic-1.10.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:298d6f765e3c9825dfa78f24c1efd29af91c3ab1b763e1fd26ae4d9e1749e5c8"}, - {file = "pydantic-1.10.21-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2f4a2305f15eff68f874766d982114ac89468f1c2c0b97640e719cf1a078374"}, - {file = "pydantic-1.10.21-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35b263b60c519354afb3a60107d20470dd5250b3ce54c08753f6975c406d949b"}, - {file = "pydantic-1.10.21-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e23a97a6c2f2db88995496db9387cd1727acdacc85835ba8619dce826c0b11a6"}, - {file = "pydantic-1.10.21-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:3c96fed246ccc1acb2df032ff642459e4ae18b315ecbab4d95c95cfa292e8517"}, - {file = "pydantic-1.10.21-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b92893ebefc0151474f682e7debb6ab38552ce56a90e39a8834734c81f37c8a9"}, - {file = "pydantic-1.10.21-cp38-cp38-win_amd64.whl", hash = "sha256:b8460bc256bf0de821839aea6794bb38a4c0fbd48f949ea51093f6edce0be459"}, - {file = "pydantic-1.10.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d387940f0f1a0adb3c44481aa379122d06df8486cc8f652a7b3b0caf08435f7"}, - {file = "pydantic-1.10.21-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:266ecfc384861d7b0b9c214788ddff75a2ea123aa756bcca6b2a1175edeca0fe"}, - {file = "pydantic-1.10.21-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61da798c05a06a362a2f8c5e3ff0341743e2818d0f530eaac0d6898f1b187f1f"}, - {file = "pydantic-1.10.21-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a621742da75ce272d64ea57bd7651ee2a115fa67c0f11d66d9dcfc18c2f1b106"}, - {file = "pydantic-1.10.21-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9e3e4000cd54ef455694b8be9111ea20f66a686fc155feda1ecacf2322b115da"}, - {file = "pydantic-1.10.21-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f198c8206640f4c0ef5a76b779241efb1380a300d88b1bce9bfe95a6362e674d"}, - {file = "pydantic-1.10.21-cp39-cp39-win_amd64.whl", hash = "sha256:e7f0cda108b36a30c8fc882e4fc5b7eec8ef584aa43aa43694c6a7b274fb2b56"}, - {file = "pydantic-1.10.21-py3-none-any.whl", hash = "sha256:db70c920cba9d05c69ad4a9e7f8e9e83011abb2c6490e561de9ae24aee44925c"}, - {file = "pydantic-1.10.21.tar.gz", hash = "sha256:64b48e2b609a6c22178a56c408ee1215a7206077ecb8a193e2fda31858b2362a"}, + {file = "pydantic-1.10.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:57889565ccc1e5b7b73343329bbe6198ebc472e3ee874af2fa1865cfe7048228"}, + {file = "pydantic-1.10.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90729e22426de79bc6a3526b4c45ec4400caf0d4f10d7181ba7f12c01bb3897d"}, + {file = "pydantic-1.10.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8684d347f351554ec94fdcb507983d3116dc4577fb8799fed63c65869a2d10"}, + {file = "pydantic-1.10.22-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8dad498ceff2d9ef1d2e2bc6608f5b59b8e1ba2031759b22dfb8c16608e1802"}, + {file = "pydantic-1.10.22-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fac529cc654d4575cf8de191cce354b12ba705f528a0a5c654de6d01f76cd818"}, + {file = "pydantic-1.10.22-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4148232aded8dd1dd13cf910a01b32a763c34bd79a0ab4d1ee66164fcb0b7b9d"}, + {file = "pydantic-1.10.22-cp310-cp310-win_amd64.whl", hash = "sha256:ece68105d9e436db45d8650dc375c760cc85a6793ae019c08769052902dca7db"}, + {file = "pydantic-1.10.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e530a8da353f791ad89e701c35787418605d35085f4bdda51b416946070e938"}, + {file = "pydantic-1.10.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:654322b85642e9439d7de4c83cb4084ddd513df7ff8706005dada43b34544946"}, + {file = "pydantic-1.10.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8bece75bd1b9fc1c32b57a32831517943b1159ba18b4ba32c0d431d76a120ae"}, + {file = "pydantic-1.10.22-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eccb58767f13c6963dcf96d02cb8723ebb98b16692030803ac075d2439c07b0f"}, + {file = "pydantic-1.10.22-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7778e6200ff8ed5f7052c1516617423d22517ad36cc7a3aedd51428168e3e5e8"}, + {file = "pydantic-1.10.22-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffe02767d27c39af9ca7dc7cd479c00dda6346bb62ffc89e306f665108317a2"}, + {file = "pydantic-1.10.22-cp311-cp311-win_amd64.whl", hash = "sha256:23bc19c55427091b8e589bc08f635ab90005f2dc99518f1233386f46462c550a"}, + {file = "pydantic-1.10.22-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:92d0f97828a075a71d9efc65cf75db5f149b4d79a38c89648a63d2932894d8c9"}, + {file = "pydantic-1.10.22-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:6af5a2811b6b95b58b829aeac5996d465a5f0c7ed84bd871d603cf8646edf6ff"}, + {file = "pydantic-1.10.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cf06d8d40993e79af0ab2102ef5da77b9ddba51248e4cb27f9f3f591fbb096e"}, + {file = "pydantic-1.10.22-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:184b7865b171a6057ad97f4a17fbac81cec29bd103e996e7add3d16b0d95f609"}, + {file = "pydantic-1.10.22-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:923ad861677ab09d89be35d36111156063a7ebb44322cdb7b49266e1adaba4bb"}, + {file = "pydantic-1.10.22-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:82d9a3da1686443fb854c8d2ab9a473251f8f4cdd11b125522efb4d7c646e7bc"}, + {file = "pydantic-1.10.22-cp312-cp312-win_amd64.whl", hash = "sha256:1612604929af4c602694a7f3338b18039d402eb5ddfbf0db44f1ebfaf07f93e7"}, + {file = "pydantic-1.10.22-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b259dc89c9abcd24bf42f31951fb46c62e904ccf4316393f317abeeecda39978"}, + {file = "pydantic-1.10.22-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9238aa0964d80c0908d2f385e981add58faead4412ca80ef0fa352094c24e46d"}, + {file = "pydantic-1.10.22-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8029f05b04080e3f1a550575a1bca747c0ea4be48e2d551473d47fd768fc1b"}, + {file = "pydantic-1.10.22-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c06918894f119e0431a36c9393bc7cceeb34d1feeb66670ef9b9ca48c073937"}, + {file = "pydantic-1.10.22-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e205311649622ee8fc1ec9089bd2076823797f5cd2c1e3182dc0e12aab835b35"}, + {file = "pydantic-1.10.22-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:815f0a73d5688d6dd0796a7edb9eca7071bfef961a7b33f91e618822ae7345b7"}, + {file = "pydantic-1.10.22-cp313-cp313-win_amd64.whl", hash = "sha256:9dfce71d42a5cde10e78a469e3d986f656afc245ab1b97c7106036f088dd91f8"}, + {file = "pydantic-1.10.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3ecaf8177b06aac5d1f442db1288e3b46d9f05f34fd17fdca3ad34105328b61a"}, + {file = "pydantic-1.10.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb36c2de9ea74bd7f66b5481dea8032d399affd1cbfbb9bb7ce539437f1fce62"}, + {file = "pydantic-1.10.22-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6b8d14a256be3b8fff9286d76c532f1a7573fbba5f189305b22471c6679854d"}, + {file = "pydantic-1.10.22-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:1c33269e815db4324e71577174c29c7aa30d1bba51340ce6be976f6f3053a4c6"}, + {file = "pydantic-1.10.22-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:8661b3ab2735b2a9ccca2634738534a795f4a10bae3ab28ec0a10c96baa20182"}, + {file = "pydantic-1.10.22-cp37-cp37m-win_amd64.whl", hash = "sha256:22bdd5fe70d4549995981c55b970f59de5c502d5656b2abdfcd0a25be6f3763e"}, + {file = "pydantic-1.10.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e3f33d1358aa4bc2795208cc29ff3118aeaad0ea36f0946788cf7cadeccc166b"}, + {file = "pydantic-1.10.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:813f079f9cd136cac621f3f9128a4406eb8abd2ad9fdf916a0731d91c6590017"}, + {file = "pydantic-1.10.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab618ab8dca6eac7f0755db25f6aba3c22c40e3463f85a1c08dc93092d917704"}, + {file = "pydantic-1.10.22-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d128e1aaa38db88caca920d5822c98fc06516a09a58b6d3d60fa5ea9099b32cc"}, + {file = "pydantic-1.10.22-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:cc97bbc25def7025e55fc9016080773167cda2aad7294e06a37dda04c7d69ece"}, + {file = "pydantic-1.10.22-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dda5d7157d543b1fa565038cae6e952549d0f90071c839b3740fb77c820fab8"}, + {file = "pydantic-1.10.22-cp38-cp38-win_amd64.whl", hash = "sha256:a093fe44fe518cb445d23119511a71f756f8503139d02fcdd1173f7b76c95ffe"}, + {file = "pydantic-1.10.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec54c89b2568b258bb30d7348ac4d82bec1b58b377fb56a00441e2ac66b24587"}, + {file = "pydantic-1.10.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8f1d1a1532e4f3bcab4e34e8d2197a7def4b67072acd26cfa60e92d75803a48"}, + {file = "pydantic-1.10.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ad83ca35508c27eae1005b6b61f369f78aae6d27ead2135ec156a2599910121"}, + {file = "pydantic-1.10.22-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53cdb44b78c420f570ff16b071ea8cd5a477635c6b0efc343c8a91e3029bbf1a"}, + {file = "pydantic-1.10.22-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:16d0a5ae9d98264186ce31acdd7686ec05fd331fab9d68ed777d5cb2d1514e5e"}, + {file = "pydantic-1.10.22-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8aee040e25843f036192b1a1af62117504a209a043aa8db12e190bb86ad7e611"}, + {file = "pydantic-1.10.22-cp39-cp39-win_amd64.whl", hash = "sha256:7f691eec68dbbfca497d3c11b92a3e5987393174cbedf03ec7a4184c35c2def6"}, + {file = "pydantic-1.10.22-py3-none-any.whl", hash = "sha256:343037d608bcbd34df937ac259708bfc83664dadf88afe8516c4f282d7d471a9"}, + {file = "pydantic-1.10.22.tar.gz", hash = "sha256:ee1006cebd43a8e7158fb7190bb8f4e2da9649719bff65d0c287282ec38dec6d"}, ] [package.dependencies] @@ -2383,4 +2336,4 @@ docs = ["sphinx-rtd-theme", "sphinxcontrib-napoleon"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.14" -content-hash = "70f8f1b1a164704cecab89ec9d5d8e1877d8dd0a7d1b0488dc0e1c3b4732b82a" +content-hash = "2624aea2caa08a5d9b569698ca403b26187f0f04d414965252db5c080626b6ce" diff --git a/pyproject.toml b/pyproject.toml index 2aa5bc28..1dc6acd9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ pydocstyle = "^6.3.0" restructuredtext-lint = "^1.4.0" pygments = "^2.14.0" rstcheck-core = {extras = ["sphinx"], version = "^1.0.3"} -json-schema-for-humans = "^0.44.5" +json-schema-for-humans = "^1.3.4" pre-commit = "^3.3.3" testing-postgresql = "^1.3.0" From 6ed417d899f731239774f26c981e0295d0ac869b Mon Sep 17 00:00:00 2001 From: Tim Band Date: Sat, 26 Apr 2025 22:11:28 +0100 Subject: [PATCH 62/85] Cope with nulls in config.yaml --- sqlsynthgen/interactive.py | 26 +++++++++------ tests/test_interactive.py | 66 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 83 insertions(+), 9 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 2b589a23..17ebd05c 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -65,7 +65,7 @@ class DbCmd(ABC, cmd.Cmd): ERROR_NO_SUCH_TABLE = "Error: '{0}' is not the name of a table in this database" @abstractmethod - def make_table_entry(self, name: str) -> TableEntry: + def make_table_entry(self, name: str, table_config: Mapping) -> TableEntry: ... 
def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): @@ -73,8 +73,14 @@ def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Ma self.config = config self.metadata = metadata self.table_entries: list[TableEntry] = [] + tables_config: Mapping = config.get("tables", {}) + if type(tables_config) is not dict: + tables_config = {} for name in metadata.tables.keys(): - entry = self.make_table_entry(name) + table_config = tables_config.get(name, {}) + if type(table_config) is not dict: + table_config = {} + entry = self.make_table_entry(name, table_config) if entry is not None: self.table_entries.append(entry) self.table_index = 0 @@ -134,9 +140,7 @@ class TableCmd(DbCmd): prompt = "(tableconf) " file = None - def make_table_entry(self, name: str) -> TableEntry: - tables = self.config.get("tables", {}) - table = tables.get(name, {}) + def make_table_entry(self, name: str, table: Mapping) -> TableEntry: if table.get("ignore", False): return TableCmdTableEntry(name, TableType.IGNORE, TableType.IGNORE) if table.get("vocabulary_table", False): @@ -161,9 +165,13 @@ def set_type(self, t_type: TableType): entry.new_type = t_type def _copy_entries(self) -> None: tables = self.config.get("tables", {}) + if type(tables) is not dict: + tables = {} for entry in self.table_entries: if entry.old_type != entry.new_type: table: dict = tables.get(entry.name, {}) + if type(table) is not dict: + table = {} if entry.new_type == TableType.IGNORE: table["ignore"] = True table.pop("vocabulary_table", None) @@ -364,9 +372,7 @@ class GeneratorCmd(DbCmd): SECONDARY_PRIVATE_TEXT = "Secondary Private on columns {0}" NOT_PRIVATE_TEXT = "Not private" - def make_table_entry(self, table_name: str) -> TableEntry | None: - tables = self.config.get("tables", {}) - table: str = tables.get(table_name, {}) + def make_table_entry(self, table_name: str, table: Mapping) -> TableEntry | None: if table.get("ignore", False): return None if table.get("vocabulary_table", False): @@ -506,6 +512,8 @@ def _remove_auto_src_stats(self) -> list[dict[str, any]]: def _copy_entries(self) -> None: src_stats = self._remove_auto_src_stats() tables = self.config.get("tables", {}) + if type(tables) is not dict: + tables = {} tes: list[GeneratorCmdTableEntry] = self.table_entries for entry in tes: rgs = [] @@ -527,7 +535,7 @@ def _copy_entries(self) -> None: if kwn: rg["kwargs"] = kwn rgs.append(rg) - if entry.name not in tables: + if type(tables.get(entry.name, None)) is not dict: tables[entry.name] = {} aq = self._get_aggregate_query(new_gens, entry.name) if aq: diff --git a/tests/test_interactive.py b/tests/test_interactive.py index ddd1b1e7..cd14fb90 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -77,6 +77,40 @@ def test_column_display(self) -> None: tc.do_columns("") self.assertListEqual(tc.column_items, [["id", "a", "b", "c"]]) + def test_null_configuration(self) -> None: + """A table still works if its configuration is None.""" + metadata = MetaData() + metadata.reflect(self.engine) + config = { + "tables": None, + } + with TestTableCmd(self.dsn, self.schema_name, metadata, config) as tc: + tc.do_next("unique_constraint_test") + tc.do_private("") + tc.do_quit("") + tables = tc.config["tables"] + self.assertFalse(tables["unique_constraint_test"].get("vocabulary_table", False)) + self.assertFalse(tables["unique_constraint_test"].get("ignore", False)) + self.assertTrue(tables["unique_constraint_test"].get("primary_private", False)) + + def 
test_null_table_configuration(self) -> None: + """A table still works if its configuration is None.""" + metadata = MetaData() + metadata.reflect(self.engine) + config = { + "tables": { + "unique_constraint_test": None, + }, + } + with TestTableCmd(self.dsn, self.schema_name, metadata, config) as tc: + tc.do_next("unique_constraint_test") + tc.do_private("") + tc.do_quit("") + tables = tc.config["tables"] + self.assertFalse(tables["unique_constraint_test"].get("vocabulary_table", False)) + self.assertFalse(tables["unique_constraint_test"].get("ignore", False)) + self.assertTrue(tables["unique_constraint_test"].get("primary_private", False)) + def test_configure_tables(self) -> None: """Test that we can change columns to ignore, vocab or normal.""" metadata = MetaData() @@ -224,6 +258,38 @@ class ConfigureGeneratorsTests(RequiresDBTestCase): database_name = "instrument" schema_name = "public" + def test_null_configuration(self): + """ Test that a table having null configuration does not break. """ + metadata = MetaData() + metadata.reflect(self.engine) + config = { + "tables": None, + } + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, config) as gc: + TABLE = "model" + gc.do_next(f"{TABLE}.name") + gc.do_propose("") + gc.do_set("1") + gc.do_quit("") + self.assertEqual(len(gc.config["tables"][TABLE]["row_generators"]), 1) + + def test_null_table_configuration(self): + """ Test that a table having null configuration does not break. """ + metadata = MetaData() + metadata.reflect(self.engine) + config = { + "tables": { + "model": None, + } + } + with TestGeneratorCmd(self.dsn, self.schema_name, metadata, config) as gc: + TABLE = "model" + gc.do_next(f"{TABLE}.name") + gc.do_propose("") + gc.do_set("1") + gc.do_quit("") + self.assertEqual(len(gc.config["tables"][TABLE]["row_generators"]), 1) + def test_set_generator_mimesis(self): """ Test that we can set one generator to a mimesis generator. 
""" metadata = MetaData() From 88fb03d9036ed65a5279e4f5c7e151a02da27fa3 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Sat, 26 Apr 2025 22:58:20 +0100 Subject: [PATCH 63/85] More robustness against null table configuration --- sqlsynthgen/interactive.py | 36 +++++++++++++++++++----------------- sqlsynthgen/utils.py | 2 ++ tests/test_interactive.py | 1 + 3 files changed, 22 insertions(+), 17 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 17ebd05c..19eb4722 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -128,6 +128,18 @@ def table_name(self): return self.table_entries[self.table_index].name def table_metadata(self) -> Table: return self.metadata.tables[self.table_name()] + def get_table_config(self, table_name: str) -> dict[str, any]: + ts = self.config.get("tables", None) + if type(ts) is not dict: + return {} + t = ts.get(table_name) + return t if type(t) is dict else {} + def set_table_config(self, table_name: str, config: dict[str, any]): + ts = self.config.get("tables", None) + if type(ts) is not dict: + self.config["tables"] = {table_name: config} + return + ts[table_name] = config @dataclass @@ -164,14 +176,9 @@ def set_type(self, t_type: TableType): entry = self.table_entries[self.table_index] entry.new_type = t_type def _copy_entries(self) -> None: - tables = self.config.get("tables", {}) - if type(tables) is not dict: - tables = {} for entry in self.table_entries: if entry.old_type != entry.new_type: - table: dict = tables.get(entry.name, {}) - if type(table) is not dict: - table = {} + table = self.get_table_config(entry.name) if entry.new_type == TableType.IGNORE: table["ignore"] = True table.pop("vocabulary_table", None) @@ -188,8 +195,7 @@ def _copy_entries(self) -> None: table.pop("ignore", None) table.pop("vocabulary_table", None) table.pop("primary_private", None) - tables[entry.name] = table - self.config["tables"] = tables + self.set_table_config(entry.name, table) def do_quit(self, _arg): "Check the updates, save them if desired and quit the configurer." @@ -511,9 +517,6 @@ def _remove_auto_src_stats(self) -> list[dict[str, any]]: def _copy_entries(self) -> None: src_stats = self._remove_auto_src_stats() - tables = self.config.get("tables", {}) - if type(tables) is not dict: - tables = {} tes: list[GeneratorCmdTableEntry] = self.table_entries for entry in tes: rgs = [] @@ -535,19 +538,18 @@ def _copy_entries(self) -> None: if kwn: rg["kwargs"] = kwn rgs.append(rg) - if type(tables.get(entry.name, None)) is not dict: - tables[entry.name] = {} aq = self._get_aggregate_query(new_gens, entry.name) if aq: src_stats.append({ "name": f"auto__{entry.name}", "query": aq, }) + table_config = self.get_table_config(entry.name) if rgs: - tables[entry.name]["row_generators"] = rgs - elif "row_generators" in tables[entry.name]: - del tables[entry.name]["row_generators"] - self.config["tables"] = tables + table_config["row_generators"] = rgs + elif "row_generators" in table_config: + del table_config["row_generators"] + self.set_table_config(entry.name, table_config) self.config["src-stats"] = src_stats def do_quit(self, _arg): diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index cd7bde3e..be45ffb5 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ -256,6 +256,8 @@ def table_is_private(config: Mapping, table_name: str) -> bool: according to config. 
""" ts = config.get("tables", {}) + if type(ts) is not dict: + return False t = ts.get(table_name, {}) return t.get("primary_private", False) diff --git a/tests/test_interactive.py b/tests/test_interactive.py index cd14fb90..df4a4ad9 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -269,6 +269,7 @@ def test_null_configuration(self): TABLE = "model" gc.do_next(f"{TABLE}.name") gc.do_propose("") + gc.do_compare("") gc.do_set("1") gc.do_quit("") self.assertEqual(len(gc.config["tables"][TABLE]["row_generators"]), 1) From 54652c55835d4bd998a2077cefd8aec82557516f Mon Sep 17 00:00:00 2001 From: Tim Band Date: Mon, 28 Apr 2025 18:38:53 +0100 Subject: [PATCH 64/85] Dockerfile --- .dockerignore | 7 +++++++ Dockerfile | 12 ++++++++++++ docs/source/docker.rst | 37 +++++++++++++++++++++++++++++++++++++ 3 files changed, 56 insertions(+) create mode 100644 .dockerignore create mode 100644 Dockerfile create mode 100644 docs/source/docker.rst diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..3cc3438a --- /dev/null +++ b/.dockerignore @@ -0,0 +1,7 @@ +.* +*.yaml.gz +orm.yaml +config.yaml +src-stats.yaml +ssg.py +dist diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..dce395fb --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +FROM python:3.13.3-alpine3.21 +RUN apk add bash poetry +WORKDIR /app +ADD . /app +RUN mkdir /pypoetry +ENV POETRY_VIRTUALENVS_PATH=/pypoetry/cache/virtualenv +ENV SHELL=/bin/bash +ENV HOME=/ +RUN poetry install +RUN poetry run sqlsynthgen --install-completion bash +WORKDIR /data +CMD ["poetry", "--directory=/app", "shell"] diff --git a/docs/source/docker.rst b/docs/source/docker.rst new file mode 100644 index 00000000..86db4a2a --- /dev/null +++ b/docs/source/docker.rst @@ -0,0 +1,37 @@ +Using Docker +============ + +Sqlsynthgen can be run in a docker container. You can build it locally or run it directly from Docker Hub. + +Building Docker locally +----------------------- + +This will build a Docker image locally called ``ssg``: + +.. code-block:: shell + docker build -t ssg . + +Running sqlsynthgen in Docker +----------------------------- + +Let us run the image built above in a way that can access a source +database on the local machine (with DSN +``postgresql://tim:tim@localhost:5432/pagila`` and schema ``public``), +and stores the files produced in a directory called ``output``: + +.. code-block:: shell + mkdir output + docker run --rm --user $(id -u):$(id -g) --network host -e SRC_SCHEMA=public -e SRC_DSN=postgresql://tim:tim@localhost:5432/pagila -itv ./output:data ssg + +You do need to create the output folder first. + +You don't need ``--network host`` if the source database is not on the local +computer. + +Running the image in this way will give you a command prompt from which +sqlsynthgen can be called. Tab completion can be used. For example, if +you type ``sq mat`` you will see +``sqlsynthgen make-tables``; although you might have to wait a second +or two after some of the ```` key presses for the completed text +to appear. Tab completion can also be used for command options such +as ``--force``. Press ```` twice to see a list of possible completions. 
From 695f17ed21bac14c814d571e2432c5a522ee69b5 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Mon, 28 Apr 2025 18:39:12 +0100 Subject: [PATCH 65/85] make-tables should not take config.yaml by default --- sqlsynthgen/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 8650ff04..349555c2 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -280,7 +280,7 @@ def make_stats( @app.command() def make_tables( - config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), + config_file: Optional[str] = Option(None, help="The configuration file, used if you want an orm.yaml lacking data for the ignored tables"), orm_file: str = Option(ORM_FILENAME, help="Path to write the ORM yaml file to"), force: bool = Option(False, help="Overwrite any existing orm yaml file."), ) -> None: From 2ab807497ca7ec2957646ae4bc9387a89f5ed9a2 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 13 May 2025 12:39:55 +0100 Subject: [PATCH 66/85] initial configure-missing implementation --- sqlsynthgen/interactive.py | 289 +++++++++++++++++++++++++++++++++++-- sqlsynthgen/main.py | 31 +++- 2 files changed, 308 insertions(+), 12 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 19eb4722..cadf025d 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -140,6 +140,14 @@ def set_table_config(self, table_name: str, config: dict[str, any]): self.config["tables"] = {table_name: config} return ts[table_name] = config + def _remove_prefix_src_stats(self, prefix: str) -> list[dict[str, any]]: + src_stats = self.config.get("src-stats", []) + new_src_stats = [] + for stat in src_stats: + if not stat.get("name", "").startswith(prefix): + new_src_stats.append(stat) + self.config["src-stats"] = new_src_stats + return new_src_stats @dataclass @@ -226,13 +234,13 @@ def do_list(self, arg): new = entry.new_type becomes = " " if old == new else "->" + TYPE_LETTER[new] self.print("{0}{1} {2}", TYPE_LETTER[old], becomes, entry.name) - def do_next(self, _arg): + def do_next(self, arg): "'next' = go to the next table, 'next tablename' = go to table 'tablename'" - if _arg: + if arg: # Find the index of the table called _arg, if any - index = next((i for i,entry in enumerate(self.table_entries) if entry.name == _arg), None) + index = next((i for i,entry in enumerate(self.table_entries) if entry.name == arg), None) if index is None: - self.print(self.ERROR_NO_SUCH_TABLE, _arg) + self.print(self.ERROR_NO_SUCH_TABLE, arg) return self.set_table_index(index) return @@ -350,12 +358,277 @@ def print_row_data(self, count: int): return self.print_results(result) + def update_config_tables(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): with TableCmd(src_dsn, src_schema, metadata, config) as tc: tc.cmdloop() return tc.config +@dataclass +class MissingnessType: + SAMPLE="sample" + SAMPLE_QUERY=( + "SELECT COUNT(*) AS _row_count, {result_names} FROM " + "(SELECT {column_is_nulls} FROM {table} ORDER BY RANDOM() LIMIT {count})" + " GROUP BY {result_names}" + ) + name: str + query: str + columns: list[str] + @classmethod + def sample_query(cls, table, count, column_names): + result_names = ", ".join([ + "{0}__is_null".format(c) + for c in column_names + ]) + column_is_nulls = ", ".join([ + "{0} IS NULL AS {0}__is_null".format(c) + for c in column_names + ]) + return cls.SAMPLE_QUERY.format( + result_names=result_names, + column_is_nulls=column_is_nulls, + table=table, + count=count, + ) + 
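# Illustration only, not part of the patch: for a hypothetical table with two
# nullable columns, the classmethod above builds a query that counts how often
# each null/not-null pattern occurs in a random sample of rows.
example_query = MissingnessType.sample_query(
    "signature_model", 1000, ["player_id", "based_on"]
)
# example_query is now (line breaks added here for readability):
#   SELECT COUNT(*) AS _row_count, player_id__is_null, based_on__is_null
#   FROM (SELECT player_id IS NULL AS player_id__is_null,
#                based_on IS NULL AS based_on__is_null
#         FROM signature_model ORDER BY RANDOM() LIMIT 1000)
#   GROUP BY player_id__is_null, based_on__is_null
# Each result row is one observed null/not-null pattern together with the
# number of sampled rows that exhibit it.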
+ +@dataclass +class MissingnessCmdTableEntry(TableEntry): + old_type: MissingnessType + new_type: MissingnessType + + +class MissingnessCmd(DbCmd): + intro = "Interactive missingness configuration. Type ? for help.\n" + prompt = "(missingness) " + file = None + PATTERN_RE = re.compile(r'SRC_STATS\["([^"]*)"\]') + + def get_nonnull_columns(self, table_name: str): + metadata_table = self.metadata.tables[table_name] + return [ + str(name) + for name, column in metadata_table.columns.items() + if column.nullable + ] + def find_missingness_query(self, missingness_generator: Mapping): + kwargs = missingness_generator.get("kwargs", {}) + patterns = kwargs.get("patterns", "") + pattern_match = self.PATTERN_RE.match(patterns) + if pattern_match: + key = pattern_match.group(1) + for src_stat in self.config["src-stats"]: + if src_stat.get("name") == key: + return src_stat.get("query", None) + return None + def make_table_entry(self, name: str, table: Mapping) -> TableEntry: + mgs = table.get("missingness_generators", []) + old = None + nonnull_columns = self.get_nonnull_columns(name) + if not nonnull_columns: + return None + if not mgs: + old = MissingnessType( + name="none", + query="", + columns=[], + ) + elif len(mgs) == 1: + mg = mgs[0] + mg_name = mg.get("name", None) + if mg_name is not None: + query = self.find_missingness_query(mg) + if query is not None: + old = MissingnessType( + name=mg_name, + query=query, + columns=mg.get("columns_assigned", []), + ) + if old is None: + return None + return MissingnessCmdTableEntry( + name=name, + old_type=old, + new_type=old, + ) + + def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): + super().__init__(src_dsn, src_schema, metadata, config) + self.set_prompt() + + def set_prompt(self): + if self.table_index < len(self.table_entries): + entry: MissingnessCmdTableEntry = self.table_entries[self.table_index] + nt = entry.new_type + if nt is None: + self.prompt = "(missingness for {0}) ".format(entry.name) + else: + self.prompt = "(missingness for {0}: {1}) ".format(entry.name, nt.name) + else: + self.prompt = "(missingness) " + def set_type(self, t_type: TableType): + if self.table_index < len(self.table_entries): + entry = self.table_entries[self.table_index] + entry.new_type = t_type + def _copy_entries(self) -> None: + src_stats = self._remove_prefix_src_stats("missing_auto__") + for entry in self.table_entries: + entry: MissingnessCmdTableEntry + table = self.get_table_config(entry.name) + if entry.new_type is None or entry.new_type.name == "none": + table.pop("missingness_generators", None) + else: + src_stat_key = "missing_auto__{0}__0".format(entry.name) + table["missingness_generators"] = [{ + "name": entry.new_type.name, + "kwargs": {"patterns": 'SRC_STATS["{0}"]'.format(src_stat_key)}, + "columns": entry.new_type.columns, + }] + src_stats.append({ + "name": src_stat_key, + "query": entry.new_type.query, + }) + self.set_table_config(entry.name, table) + + def do_quit(self, _arg): + "Check the updates, save them if desired and quit the configurer." 
+ count = 0 + for entry in self.table_entries: + if entry.old_type != entry.new_type: + count += 1 + if entry.old_type is None: + self.print("Putting generator {0} on table {1}", entry.name, entry.new_type.name) + elif entry.new_type is None: + self.print("Deleting generator {1} from table {0}", entry.name, entry.old_type.name) + else: + self.print( + "Changing {0} from {1} to {2}", + entry.name, + entry.old_type.name, + entry.new_type.name, + ) + if count == 0: + self.print("There are no changes.") + return True + reply = self.ask_save() + if reply == "yes": + self._copy_entries() + return True + if reply == "no": + return True + return False + def do_list(self, arg): + "list the tables with their types" + for entry in self.table_entries: + old = "-" if entry.old_type is None else entry.old_type.name + new = "-" if entry.new_type is None else entry.new_type.name + desc = new if old == new else "{0}->{1}".format(old, new) + self.print("{0} {1}", entry.name, desc) + def do_next(self, arg): + "'next' = go to the next table, 'next tablename' = go to table 'tablename'" + if arg: + # Find the index of the table called _arg, if any + index = next((i for i,entry in enumerate(self.table_entries) if entry.name == arg), None) + if index is None: + self.print(self.ERROR_NO_SUCH_TABLE, arg) + return + self.set_table_index(index) + return + self.next_table(self.ERROR_NO_MORE_TABLES) + def complete_next(self, text, line, begidx, endidx): + return [ + entry.name + for entry in self.table_entries + if entry.name.startswith(text) + ] + def do_previous(self, _arg): + "Go to the previous table" + if not self.set_table_index(self.table_index - 1): + self.print(self.ERROR_ALREADY_AT_START) + def _set_type(self, name, query): + if len(self.table_entries) <= self.table_index: + return + entry: MissingnessCmdTableEntry = self.table_entries[self.table_index] + entry.new_type = MissingnessType( + name=name, + query=query, + columns=entry.old_type.columns, + ) + def _set_none(self): + if len(self.table_entries) <= self.table_index: + return + entry: MissingnessCmdTableEntry = self.table_entries[self.table_index] + entry.new_type = None + def do_sample(self, arg: str): + """ + Set the current table missingness as sample, and go to the next table. + "sample 3000" means sample 3000 rows at random and choose the missingness + to be the same as one of those 3000 at random. + "sample" means the same, but with a default number of rows sampled (1000). + """ + if len(self.table_entries) <= self.table_index: + self.print("Error! not on a table") + return + entry: MissingnessCmdTableEntry = self.table_entries[self.table_index] + if arg == "": + count = 1000 + elif arg.isdecimal(): + count = int(arg) + else: + self.print("Error: sample can be used alone or with an integer argument. 
{0} is not permitted", arg) + return + self._set_type( + MissingnessType.SAMPLE, + MissingnessType.sample_query( + entry.name, + count, + self.get_nonnull_columns(entry.name), + ), + ) + self.print("Table {} set to sampled missingness", self.table_name()) + self.next_table() + def do_none(self, _arg): + "Set the current table to have no missingness, and go to the next table" + self._set_none() + self.print("Table {} set to have no missingness", self.table_name()) + self.next_table() + def do_counts(self, _arg): + "Report the column names with the counts of nulls in them" + if len(self.table_entries) <= self.table_index: + return + table_name = self.table_entries[self.table_index].name + nonnull_columns = self.get_nonnull_columns(table_name) + colcounts = [ + ", COUNT({0}) AS {0}".format(nnc) + for nnc in nonnull_columns + ] + with self.engine.connect() as connection: + result = connection.execute( + text("SELECT COUNT(*) AS _total_row_count{colcounts} FROM {table}".format( + table=table_name, + colcounts="".join(colcounts), + )) + ).first() + if result is None: + self.print("Could not count rows in table {0}", table_name) + return + row_count = result._total_row_count + self.print("Total row count: {}", row_count) + self.print_table(["Column", "NULL count"], [ + [name, row_count - count] + for name, count in result._mapping.items() + if name != "_total_row_count" + ]) + + +def update_missingness(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): + with MissingnessCmd(src_dsn, src_schema, metadata, config) as mc: + mc.cmdloop() + return mc.config + + @dataclass class GeneratorInfo: column: str @@ -507,13 +780,7 @@ def set_prompt(self): ) def _remove_auto_src_stats(self) -> list[dict[str, any]]: - src_stats = self.config.get("src-stats", []) - new_src_stats = [] - for stat in src_stats: - if not stat.get("name", "").startswith("auto__"): - new_src_stats.append(stat) - self.config["src-stats"] = new_src_stats - return new_src_stats + return self._remove_prefix_src_stats("auto__") def _copy_entries(self) -> None: src_stats = self._remove_auto_src_stats() diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 349555c2..81fbd06a 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -13,7 +13,11 @@ from typer import Argument, Option, Typer from sqlsynthgen.create import create_db_data, create_db_tables, create_db_vocab -from sqlsynthgen.interactive import update_config_tables, update_config_generators +from sqlsynthgen.interactive import ( + update_config_tables, + update_config_generators, + update_missingness, +) from sqlsynthgen.make import ( make_src_stats, make_table_generators, @@ -355,6 +359,31 @@ def configure_tables( logger.debug("Tables configured in %s.", config_file) +@app.command() +def configure_missing( + config_file: Optional[str] = Option(CONFIG_FILENAME, help="Path to write the configuration file to"), + orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), +): + """ + Interactively set the missingness of the generated data. 
+ """ + logger.debug("Configuring missingness in %s.", config_file) + settings = get_settings() + src_dsn: str = _require_src_db_dsn(settings) + config_file_path = Path(config_file) + config = {} + if config_file_path.exists(): + config = yaml.load(config_file_path.read_text(encoding="UTF-8"), Loader=yaml.SafeLoader) + metadata = load_metadata(orm_file, config) + config_updated = update_missingness(src_dsn, settings.src_schema, metadata, config) + if config_updated is None: + logger.debug("Cancelled") + return + content = yaml.dump(config_updated) + config_file_path.write_text(content, encoding="utf-8") + logger.debug("Generators missingness in %s.", config_file) + + @app.command() def configure_generators( config_file: Optional[str] = Option(CONFIG_FILENAME, help="Path of the configuration file to alter"), From c2c308fde7efa5454ffe07463c279c0c3190a34a Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 13 May 2025 19:14:46 +0100 Subject: [PATCH 67/85] initial missingness_generators implementation --- sqlsynthgen/base.py | 19 +++ sqlsynthgen/interactive.py | 28 ++--- sqlsynthgen/json_schemas/config_schema.json | 28 +++-- sqlsynthgen/make.py | 132 ++++---------------- sqlsynthgen/templates/ssg.py.j2 | 9 +- 5 files changed, 77 insertions(+), 139 deletions(-) diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index 470b34fe..4e947cfe 100644 --- a/sqlsynthgen/base.py +++ b/sqlsynthgen/base.py @@ -120,3 +120,22 @@ def load(self, connection: Connection) -> None: logger.warning( "Error inserting rows into table %s: %s", self.table.fullname, e ) + +class ColumnPresence: + def __init__(self): + self.rng = numpy.random.default_rng() + def sampled(self, patterns): + total = 0 + for pattern in patterns: + total += pattern.get("row_count", 0) + s = self.rng.integers(total) + for pattern in patterns: + s -= pattern.get("row_count", 0) + if s < 0: + cs = set() + for column, nullness in pattern.items(): + if not nullness and column.endswith("__is_null"): + cs.add(column[:-9]) + return cs + logger.error("failed to sample patterns") + return set() diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index cadf025d..7c68db9d 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -367,8 +367,8 @@ def update_config_tables(src_dsn: str, src_schema: str, metadata: MetaData, conf @dataclass class MissingnessType: - SAMPLE="sample" - SAMPLE_QUERY=( + SAMPLED="column_presence.sampled" + SAMPLED_QUERY=( "SELECT COUNT(*) AS _row_count, {result_names} FROM " "(SELECT {column_is_nulls} FROM {table} ORDER BY RANDOM() LIMIT {count})" " GROUP BY {result_names}" @@ -377,7 +377,7 @@ class MissingnessType: query: str columns: list[str] @classmethod - def sample_query(cls, table, count, column_names): + def sampled_query(cls, table, count, column_names): result_names = ", ".join([ "{0}__is_null".format(c) for c in column_names @@ -386,7 +386,7 @@ def sample_query(cls, table, count, column_names): "{0} IS NULL AS {0}__is_null".format(c) for c in column_names ]) - return cls.SAMPLE_QUERY.format( + return cls.SAMPLED_QUERY.format( result_names=result_names, column_is_nulls=column_is_nulls, table=table, @@ -561,12 +561,12 @@ def _set_none(self): return entry: MissingnessCmdTableEntry = self.table_entries[self.table_index] entry.new_type = None - def do_sample(self, arg: str): + def do_sampled(self, arg: str): """ - Set the current table missingness as sample, and go to the next table. 
- "sample 3000" means sample 3000 rows at random and choose the missingness + Set the current table missingness as 'sampled', and go to the next table. + "sampled 3000" means sample 3000 rows at random and choose the missingness to be the same as one of those 3000 at random. - "sample" means the same, but with a default number of rows sampled (1000). + "sampled" means the same, but with a default number of rows sampled (1000). """ if len(self.table_entries) <= self.table_index: self.print("Error! not on a table") @@ -577,11 +577,11 @@ def do_sample(self, arg: str): elif arg.isdecimal(): count = int(arg) else: - self.print("Error: sample can be used alone or with an integer argument. {0} is not permitted", arg) + self.print("Error: sampled can be used alone or with an integer argument. {0} is not permitted", arg) return self._set_type( - MissingnessType.SAMPLE, - MissingnessType.sample_query( + MissingnessType.SAMPLED, + MissingnessType.sampled_query( entry.name, count, self.get_nonnull_columns(entry.name), @@ -606,7 +606,7 @@ def do_counts(self, _arg): ] with self.engine.connect() as connection: result = connection.execute( - text("SELECT COUNT(*) AS _total_row_count{colcounts} FROM {table}".format( + text("SELECT COUNT(*) AS row_count{colcounts} FROM {table}".format( table=table_name, colcounts="".join(colcounts), )) @@ -614,12 +614,12 @@ def do_counts(self, _arg): if result is None: self.print("Could not count rows in table {0}", table_name) return - row_count = result._total_row_count + row_count = result.row_count self.print("Total row count: {}", row_count) self.print_table(["Column", "NULL count"], [ [name, row_count - count] for name, count in result._mapping.items() - if name != "_total_row_count" + if name != "row_count" ]) diff --git a/sqlsynthgen/json_schemas/config_schema.json b/sqlsynthgen/json_schemas/config_schema.json index 5033fda4..2d985e4f 100644 --- a/sqlsynthgen/json_schemas/config_schema.json +++ b/sqlsynthgen/json_schemas/config_schema.json @@ -190,15 +190,27 @@ } } }, - "unions": { - "description": "Groups of columns that represent different representations of the same value.", - "type": "object", - "patternProperties": { - ".*": { - "type": "array", - "items": { - "description": "Column name.", + "missingness_generators": { + "description": "Function to generate a set of nullable columns that should not be null", + "type": "array", + "items": { + "type": "object", + "required": ["name"], + "properties": { + "name": { + "description": "The name of a (built-in or custom) function (e.g. column_presence.sampled).", "type": "string" + }, + "kwargs": { + "description": "Keyword arguments to pass to the function.", + "type": "object" + }, + "columns_assigned": { + "description": "Column names that might be returned.", + "type": "array", + "items": { + "type": "string" + } } } } diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 7430f69d..a38197d0 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -74,117 +74,24 @@ class RowGeneratorInfo: @dataclass class ColumnChoice: """ Chooses columns based on a random number in [0,1) """ - options: list[tuple[float, set[str]]] - - def all_columns(self) -> set[str]: - """ Returns the set of all columns known """ - cols: set[str] = set() - for (_, cs) in self.options: - cols.update(cs) - return cols - - def choose(self, p: float) -> set[str]: - """ - Returns a set of columns that should have non-null values set. - - p is a random number 0 <= p < 1 upon which this choice is made. 
- """ - for (cumulative_probability, values) in self.options: - if p < cumulative_probability: - return values - return [] - - @classmethod - def make( - _cls, - cols: Iterable[str], - dependent_columns: dict[str, Iterable[str]], - row_count: int | None, - value_count: dict[str, int], - ) -> Self: - """ - Makes a ColumnChoice out of a union - - cols: the columns in the union - dependent_columns: a dict whose keys are a subset of cols, and - whose values are the names of the columns (including the key!) - that share row generators with this column (if any). - row_count: The total number of rows in the table, if known. - value_count: A dict whose keys are a subset of cols, and whose - values are the number of nonnull values in this column. - Columns for which this number is not known are not in the - keys of this dict. - """ - total_value_count = 0 - counted_column_count = 0 - for col in cols: - vc = value_count.get(col, None) - if vc is not None: - counted_column_count += 1 - total_value_count += vc - # work out what proportion to assign to uncounted columns - if row_count is None: - if counted_column_count == 0: - default_count = 1 - row_count = len(cols) - else: - default_count = total_value_count / counted_column_count - row_count = default_count * len(cols) - elif counted_column_count == len(cols): - default_count = 0 - else: - default_count = row_count / (len(cols) - counted_column_count) - cumulative_count = 0 - choice = ColumnChoice(options=[]) - for col in cols: - cumulative_count += value_count.get(col, default_count) - proportion = cumulative_count / row_count - if col in dependent_columns: - choice.options.append((proportion, dependent_columns[col])) - else: - choice.options.append((proportion, {col})) - return choice + function_name: str + argument_values: list[str] def make_column_choices( - table_name: str, table_config: Mapping[str, Any], - src_stats: Mapping[str, Any], ) -> list[ColumnChoice]: - # each union is a dict of union names to a list of its columns - unions: dict[str, list[str]] = get_property(table_config, "unions", {}) - # Set of all columns that are part of a union - columns_in_union: set[str] = set() - for (union_name, cols) in unions.items(): - for col in cols: - if col in columns_in_union: - logger.warning("union %s overlaps with another union in table %s", union_name, table_name) - columns_in_union.add(col) - # Now we find row_generators that overlap (by one only!) with unions: - # the columns in these generators must be null (or not null) together. 
- dependent_columns: dict[str, set[str]] = {} - for row_gen in get_property(table_config, "row_generators", []): - assigned = row_gen["columns_assigned"] - if type(assigned) is list: - assigned_set = set(assigned) - intersection = assigned_set.intersection(columns_in_union) - n = len(intersection) - if 1 < n: - logger.warning( - "row generator %s in table %s supplies columns for multiple unions", - row_gen["name"], - table_name, - ) - elif 1 == n: - u = intersection.pop() - dependent_columns[u] = assigned_set - # Now we can convert unions to ColumnChoices - choices: list[ColumnChoice] = [] - for cols in unions.values(): - choices.append( - ColumnChoice.make(cols, dependent_columns) + return [ + ColumnChoice( + function_name=mg["name"], + argument_values=[ + f"{k}={v}" + for k, v in mg.get("kwargs", {}).items() + ] ) - return choices + for mg in table_config.get("missingness_generators", []) + if "name" in mg + ] @dataclass @@ -530,7 +437,6 @@ def __init__(self, *columns: Column, name: str): def _get_generator_for_table( table_config: Mapping[str, Any], table: Table, - src_stats: Mapping[str, Any]=None ) -> TableGeneratorInfo: """Get generator information for the given table.""" unique_constraints = sorted( @@ -550,10 +456,15 @@ def _get_generator_for_table( *primary_keys, name=f"{table.name}_primary_key" )) - column_choices = make_column_choices(table.name, table_config, src_stats) - nonnull_columns={str(col.name) for col in table.columns} - for cc in column_choices: - nonnull_columns.difference_update(cc.all_columns()) + column_choices = make_column_choices(table_config) + if column_choices: + nonnull_columns = { + str(col.name) + for col in table.columns + if not table.columns[col.name].nullable + } + else: + nonnull_columns = {str(col.name) for col in table.columns} table_data: TableGeneratorInfo = TableGeneratorInfo( table_name=table.name, class_name=table.name.title() + "Generator", @@ -672,7 +583,6 @@ def make_table_generators( # pylint: disable=too-many-locals tables.append(_get_generator_for_table( tables_config.get(table.name, {}), table, - src_stats, )) story_generators = _get_story_generators(config) diff --git a/sqlsynthgen/templates/ssg.py.j2 b/sqlsynthgen/templates/ssg.py.j2 index 1d5bff20..157a7472 100644 --- a/sqlsynthgen/templates/ssg.py.j2 +++ b/sqlsynthgen/templates/ssg.py.j2 @@ -2,13 +2,14 @@ from mimesis import Generic, Numeric, Person from mimesis.locales import Locale import sqlalchemy -from sqlsynthgen.base import FileUploader, TableGenerator, DistributionGenerator +from sqlsynthgen.base import FileUploader, TableGenerator, DistributionGenerator, ColumnPresence from sqlsynthgen.main import load_metadata generic = Generic(locale=Locale.EN_GB) numeric = Numeric() person = Person() dist_gen = DistributionGenerator() +column_presence = ColumnPresence() from sqlsynthgen.providers import ( {% for provider_import in provider_imports %} @@ -63,11 +64,7 @@ class {{ table_data.class_name }}(TableGenerator): result = {} columns_to_generate = set({{ table_data.nonnull_columns }}) {% for choice in table_data.column_choices %} - p = get_random() - {% for probability, columns in choice.options %} - {%+ if not loop.first %}el{%+ endif %}if p < {{ probability }}: - columns_to_generate.update(set({{ columns }})) - {% endfor %} + columns_to_generate.update({{ choice.function_name }}({{ choice.argument_values| join(", ") }})) {% endfor %} {% if max_unique_constraint_tries is not none %} max_tries={{max_unique_constraint_tries}}, From da0e165c76035aca38a72650b7d903bf80804685 Mon Sep 17 
00:00:00 2001 From: Tim Band Date: Wed, 14 May 2025 11:33:45 +0100 Subject: [PATCH 68/85] First test for configure-missing --- sqlsynthgen/interactive.py | 5 ++-- tests/examples/instrument.sql | 40 +++++++++++++++++++++++++++++-- tests/test_interactive.py | 45 +++++++++++++++++++++++++++++++---- 3 files changed, 82 insertions(+), 8 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 7c68db9d..004aab3a 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -404,6 +404,7 @@ class MissingnessCmd(DbCmd): intro = "Interactive missingness configuration. Type ? for help.\n" prompt = "(missingness) " file = None + ROW_COUNT_MSG = "Total row count: {}" PATTERN_RE = re.compile(r'SRC_STATS\["([^"]*)"\]') def get_nonnull_columns(self, table_name: str): @@ -554,7 +555,7 @@ def _set_type(self, name, query): entry.new_type = MissingnessType( name=name, query=query, - columns=entry.old_type.columns, + columns=self.get_nonnull_columns(entry.name), ) def _set_none(self): if len(self.table_entries) <= self.table_index: @@ -615,7 +616,7 @@ def do_counts(self, _arg): self.print("Could not count rows in table {0}", table_name) return row_count = result.row_count - self.print("Total row count: {}", row_count) + self.print(self.ROW_COUNT_MSG, row_count) self.print_table(["Column", "NULL count"], [ [name, row_count - count] for name, count in result._mapping.items() diff --git a/tests/examples/instrument.sql b/tests/examples/instrument.sql index c61598a1..836b4264 100644 --- a/tests/examples/instrument.sql +++ b/tests/examples/instrument.sql @@ -26,7 +26,7 @@ CREATE TABLE public.model ( ALTER TABLE ONLY public.model ADD CONSTRAINT model_pkey PRIMARY KEY (id); ALTER TABLE ONLY public.model - ADD CONSTRAINT concept_manufacturer_id_fkey FOREIGN KEY (manufacturer_id) REFERENCES public.manufacturer(id); + ADD CONSTRAINT manufacturer_id_fkey FOREIGN KEY (manufacturer_id) REFERENCES public.manufacturer(id); ALTER TABLE public.model OWNER TO postgres; @@ -43,7 +43,7 @@ CREATE TABLE public.string ( ALTER TABLE ONLY public.string ADD CONSTRAINT string_pkey PRIMARY KEY (id); ALTER TABLE ONLY public.string - ADD CONSTRAINT concept_model_id_fkey FOREIGN KEY (model_id) REFERENCES public.model(id); + ADD CONSTRAINT model_id_fkey FOREIGN KEY (model_id) REFERENCES public.model(id); ALTER TABLE public.string OWNER TO postgres; @@ -63,3 +63,39 @@ INSERT INTO public.string VALUES (13, 3, 3, 196); INSERT INTO public.string VALUES (14, 3, 4, 146.83); INSERT INTO public.string VALUES (15, 3, 5, 110); INSERT INTO public.string VALUES (16, 3, 6, 82.4); + +CREATE TABLE public.player ( + id INTEGER NOT NULL, + given_name TEXT NOT NULL, + family_name TEXT NOT NULL +); + +ALTER TABLE ONLY public.player ADD CONSTRAINT player_pkey PRIMARY KEY (id); + +ALTER TABLE public.player OWNER TO postgres; + +INSERT INTO public.player VALUES (1, 'Mark', 'Samson'); +INSERT INTO public.player VALUES (2, 'Tim', 'Friedman'); +INSERT INTO public.player VALUES (3, 'Pierre', 'Marchmont'); + +CREATE TABLE public.signature_model ( + id INTEGER NOT NULL, + name TEXT NOT NULL, + player_id INTEGER, + based_on INTEGER +); + +ALTER TABLE ONLY public.signature_model ADD CONSTRAINT signature_model_pkey PRIMARY KEY (id); +ALTER TABLE ONLY public.signature_model + ADD CONSTRAINT player_id_fkey FOREIGN KEY (player_id) REFERENCES public.player(id); +ALTER TABLE ONLY public.signature_model + ADD CONSTRAINT based_on_id_fkey FOREIGN KEY (based_on) REFERENCES public.model(id); + +ALTER TABLE public.signature_model OWNER TO 
postgres; + +INSERT INTO public.signature_model VALUES (1, 'Flame', 1, NULL); +INSERT INTO public.signature_model VALUES (2, 'Dragon', NULL, 1); +INSERT INTO public.signature_model VALUES (3, 'Veleno', 2, 2); +INSERT INTO public.signature_model VALUES (4, 'Grifter', NULL, NULL); +INSERT INTO public.signature_model VALUES (5, 'Proton', 3, 1); +INSERT INTO public.signature_model VALUES (6, 'Isabelle', NULL, 3); diff --git a/tests/test_interactive.py b/tests/test_interactive.py index df4a4ad9..4b93f22c 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -2,9 +2,8 @@ import copy import re from sqlalchemy import MetaData, select -from sqlalchemy.orm import declarative_base -from sqlsynthgen.interactive import DbCmd, TableCmd, GeneratorCmd +from sqlsynthgen.interactive import DbCmd, TableCmd, GeneratorCmd, MissingnessCmd from tests.utils import RequiresDBTestCase @@ -241,7 +240,7 @@ def test_list_tables(self): class TestGeneratorCmd(GeneratorCmd, TestDbCmdMixin): - """ TableCmd but mocked """ + """ GeneratorCmd but mocked """ def get_proposals(self) -> dict[str, tuple[int, str, str, list[str]]]: """ Returns a dict of generator name to a tuple of (index, fit_string, [list,of,samples])""" @@ -259,7 +258,7 @@ class ConfigureGeneratorsTests(RequiresDBTestCase): schema_name = "public" def test_null_configuration(self): - """ Test that a table having null configuration does not break. """ + """ Test that the tables having null configuration does not break. """ metadata = MetaData() metadata.reflect(self.engine) config = { @@ -567,3 +566,41 @@ def test_existing_configuration_remains(self): } self.assertEqual(src_stats["kraken"], config["src-stats"][0]["query"]) self.assertTrue(gc.config["tables"]["string"]["primary_private"]) + + +class TestMissingnessCmd(MissingnessCmd, TestDbCmdMixin): + """ MissingnessCmd but mocked """ + +class ConfigureMissingnessTests(RequiresDBTestCase): + """ Testing configure-missing. """ + dump_file_path = "instrument.sql" + database_name = "instrument" + schema_name = "public" + + def test_set_missingness_to_sampled(self): + """ Test that we can set one table to sampled missingness. 
""" + metadata = MetaData() + metadata.reflect(self.engine) + with TestMissingnessCmd(self.dsn, self.schema_name, metadata, {}) as mc: + TABLE = "signature_model" + mc.do_next(TABLE) + mc.do_counts("") + self.assertListEqual(mc.messages, [(MissingnessCmd.ROW_COUNT_MSG, (6,), {})]) + self.assertListEqual(mc.rows, [['player_id', 3], ['based_on', 2]]) + mc.do_sampled("") + mc.do_quit("") + self.assertDictEqual( + mc.config, + { "tables": {TABLE: {"missingness_generators": [{ + "columns": ["player_id", "based_on"], + "kwargs": {"patterns": 'SRC_STATS["missing_auto__signature_model__0"]'}, + "name": "column_presence.sampled", + }]}}, + "src-stats": [{ + "name": "missing_auto__signature_model__0", + "query": ("SELECT COUNT(*) AS _row_count, player_id__is_null, based_on__is_null FROM" + " (SELECT player_id IS NULL AS player_id__is_null, based_on IS NULL AS based_on__is_null FROM" + " signature_model ORDER BY RANDOM() LIMIT 1000) GROUP BY player_id__is_null, based_on__is_null") + }] + } + ) From 0b8b3de7d411835d3c054314d0e42ddddd457f99 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Wed, 14 May 2025 18:50:55 +0100 Subject: [PATCH 69/85] removed some cruft --- sqlsynthgen/make.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index a38197d0..19f27769 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -557,11 +557,6 @@ def make_table_generators( # pylint: disable=too-many-locals story_generator_module_name = config.get("story_generators_module", None) tables_config = config.get("tables", {}) - src_stats = {} - if src_stats_filename: - with open(src_stats_filename, "r", encoding="utf-8") as f: - src_stats = yaml.unsafe_load(f) - tables: list[TableGeneratorInfo] = [] vocabulary_tables: list[VocabularyTableGeneratorInfo] = [] vocab_names = get_vocabulary_table_names(config) From 8ce26a06e446c6930c80b1c3436bbb2682668283 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 15 May 2025 13:22:44 +0100 Subject: [PATCH 70/85] end-to-end missingness test --- sqlsynthgen/create.py | 20 +++++++++- sqlsynthgen/generators.py | 2 +- sqlsynthgen/interactive.py | 5 ++- sqlsynthgen/remove.py | 14 ++++++- tests/test_interactive.py | 82 +++++++++++++++++++++++++++++++++++++- 5 files changed, 117 insertions(+), 6 deletions(-) diff --git a/sqlsynthgen/create.py b/sqlsynthgen/create.py index e26f1534..010595b9 100644 --- a/sqlsynthgen/create.py +++ b/sqlsynthgen/create.py @@ -100,8 +100,26 @@ def create_db_data( dst_dsn: str = settings.dst_dsn or "" assert dst_dsn != "", "Missing DST_DSN setting." 
+ return create_db_data_into( + sorted_tables, + table_generator_dict, + story_generator_list, + num_passes, + dst_dsn, + settings.dst_schema, + ) + + +def create_db_data_into( + sorted_tables: Sequence[Table], + table_generator_dict: Mapping[str, TableGenerator], + story_generator_list: Sequence[Mapping[str, Any]], + num_passes: int, + db_dsn: str, + schema_name: str | None, +) -> RowCounts: dst_engine = get_sync_engine( - create_db_engine(dst_dsn, schema_name=settings.dst_schema) + create_db_engine(db_dsn, schema_name=schema_name) ) row_counts: Counter[str] = Counter() diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index af219b4e..94aade82 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -344,7 +344,7 @@ def __init__(self, column: Column, engine: Engine, function_name: str): result = connection.execute( text(f"SELECT {self._min_year} AS start, {self._max_year} AS end FROM {column.table.name}") ).first() - if result is None: + if result is None or result.start is None or result.end is None: return None self._start = int(result.start) self._end = int(result.end) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 004aab3a..2b825426 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -100,8 +100,9 @@ def print_table(self, headings: list[str], rows: list[list[str]]): print(output) def print_table_by_columns(self, columns: dict[str, list[str]]): output = PrettyTable() + row_count = max([len(col) for col in columns.values()]) for field_name, data in columns.items(): - output.add_column(field_name, data) + output.add_column(field_name, data + [None] * (row_count - len(data))) print(output) def print_results(self, result): self.print_table( @@ -369,7 +370,7 @@ def update_config_tables(src_dsn: str, src_schema: str, metadata: MetaData, conf class MissingnessType: SAMPLED="column_presence.sampled" SAMPLED_QUERY=( - "SELECT COUNT(*) AS _row_count, {result_names} FROM " + "SELECT COUNT(*) AS row_count, {result_names} FROM " "(SELECT {column_is_nulls} FROM {table} ORDER BY RANDOM() LIMIT {count})" " GROUP BY {result_names}" ) diff --git a/sqlsynthgen/remove.py b/sqlsynthgen/remove.py index ba62b6cd..00845b0e 100644 --- a/sqlsynthgen/remove.py +++ b/sqlsynthgen/remove.py @@ -21,8 +21,20 @@ def remove_db_data( """Truncate the synthetic data tables but not the vocabularies.""" settings = get_settings() assert settings.dst_dsn, "Missing destination database settings" + remove_db_data_from( + metadata, + config, + settings.dst_dsn, + schema_name=settings.dst_schema + ) + + +def remove_db_data_from( + metadata: MetaData, config: Mapping[str, Any], db_dsn: str, schema_name: str | None +) -> None: + """Truncate the synthetic data tables but not the vocabularies.""" dst_engine = get_sync_engine( - create_db_engine(settings.dst_dsn, schema_name=settings.dst_schema) + create_db_engine(db_dsn, schema_name=schema_name) ) with dst_engine.connect() as dst_conn: diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 4b93f22c..16d9976f 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -1,9 +1,21 @@ """Tests for the base module.""" +import asyncio import copy +import os +import random import re from sqlalchemy import MetaData, select +from tempfile import mkstemp +import yaml +from pydantic import BaseSettings + +from sqlsynthgen.create import create_db_data_into from sqlsynthgen.interactive import DbCmd, TableCmd, GeneratorCmd, MissingnessCmd +from sqlsynthgen.make import 
make_tables_file, make_src_stats, make_table_generators +from sqlsynthgen.remove import remove_db_data_from +from sqlsynthgen.utils import import_file, sorted_non_vocabulary_tables + from tests.utils import RequiresDBTestCase @@ -598,9 +610,77 @@ def test_set_missingness_to_sampled(self): }]}}, "src-stats": [{ "name": "missing_auto__signature_model__0", - "query": ("SELECT COUNT(*) AS _row_count, player_id__is_null, based_on__is_null FROM" + "query": ("SELECT COUNT(*) AS row_count, player_id__is_null, based_on__is_null FROM" " (SELECT player_id IS NULL AS player_id__is_null, based_on IS NULL AS based_on__is_null FROM" " signature_model ORDER BY RANDOM() LIMIT 1000) GROUP BY player_id__is_null, based_on__is_null") }] } ) + + def test_create_with_missingness(self): + """ Test that we can sample real missingness and reproduce it. """ + random.seed(45) + # Generate the `orm.yaml` from the database + (orm_fd, orm_file_path) = mkstemp(".yaml", "orm_", text=True) + (config_fd, config_file_path) = mkstemp(".yaml", "config_", text=True) + (stats_fd, stats_file_path) = mkstemp(".yaml", "src_stats_", text=True) + (ssg_fd, ssg_file_path) = mkstemp(".py", "ssg_", text=True) + schema = "public" + table_name = "signature_model" + with os.fdopen(orm_fd, "w", encoding="utf-8") as orm_fh: + orm_fh.write(make_tables_file(self.dsn, schema, {})) + # Configure the missingness + metadata = MetaData() + metadata.reflect(self.engine) + with TestMissingnessCmd(self.dsn, self.schema_name, metadata, {}) as mc: + mc.do_next(table_name) + mc.do_sampled("") + mc.do_quit("") + config = mc.config + # Save out the resulting configuration + with os.fdopen(config_fd, "w", encoding="utf-8") as config_fh: + config_fh.write(yaml.dump(config)) + # `make-stats` producing `src-stats.yaml` + loop = asyncio.new_event_loop() + src_stats = loop.run_until_complete( + make_src_stats(self.dsn, config, metadata, schema) + ) + loop.close() + with os.fdopen(stats_fd, "w", encoding="utf-8") as stats_fh: + stats_fh.write(yaml.dump(src_stats)) + # `make-generators` with `src-stats.yaml` and the rest, producing `ssg.py` + ssg_content = make_table_generators( + metadata, + config, + orm_file_path, + config_file_path, + stats_file_path, + ) + with os.fdopen(ssg_fd, "w", encoding="utf-8") as ssg_fh: + ssg_fh.write(ssg_content) + # `remove-data` so we don't have to use a separate database for the destination + remove_db_data_from(metadata, config, self.dsn, schema) + # `create-data` with all this stuff + ssg_module = import_file(ssg_file_path) + table_generator_dict = ssg_module.table_generator_dict + story_generator_list = ssg_module.story_generator_list + num_passes = 100 + row_counts = create_db_data_into( + sorted_non_vocabulary_tables(metadata, config), + table_generator_dict, + story_generator_list, + num_passes, + self.dsn, + schema, + ) + # Test that each missingness pattern is present in the database + with self.engine.connect() as conn: + stmt = select(metadata.tables[table_name]) + rows = conn.execute(stmt).mappings().fetchall() + patterns: set[int] = set() + for row in rows: + p = 0 if row["player_id"] is None else 1 + b = 0 if row["based_on"] is None else 2 + patterns.add(p + b) + # all pattern possibilities should be present + self.assertSetEqual(patterns, {0, 1, 2, 3}) From 89b2c186c2464ba2faae9f1f498d8c6979e3eba3 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Thu, 15 May 2025 16:23:04 +0100 Subject: [PATCH 71/85] interactive commands ask to save even when no changes have been made --- sqlsynthgen/interactive.py | 9 +++------ 1 file 
changed, 3 insertions(+), 6 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 2b825426..7baae0fd 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -219,8 +219,7 @@ def do_quit(self, _arg): entry.new_type.value, ) if count == 0: - self.print("There are no changes.") - return True + self.print("You have made no changes.") reply = self.ask_save() if reply == "yes": self._copy_entries() @@ -512,8 +511,7 @@ def do_quit(self, _arg): entry.new_type.name, ) if count == 0: - self.print("There are no changes.") - return True + self.print("You have made no changes.") reply = self.ask_save() if reply == "yes": self._copy_entries() @@ -840,8 +838,7 @@ def do_quit(self, _arg): gen.new_gen.function_name() if gen.new_gen else "nothing", ) if count == 0: - self.print("There are no changes.") - return True + self.print("You have made no changes.") reply = self.ask_save() if reply == "yes": self._copy_entries() From 7f3ed8103492d4f852ad507afe2836dcee34d802 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 16 May 2025 14:20:34 +0100 Subject: [PATCH 72/85] Much better information on columns in configure-tables and configure-generators --- sqlsynthgen/interactive.py | 41 ++++++++++++++++++++++++++++++++------ 1 file changed, 35 insertions(+), 6 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 7baae0fd..ca35d0ef 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -129,6 +129,11 @@ def table_name(self): return self.table_entries[self.table_index].name def table_metadata(self) -> Table: return self.metadata.tables[self.table_name()] + def report_columns(self): + self.print_table(["name", "type", "primary", "nullable", "foreign key"], [ + [name, str(col.type), col.primary_key, col.nullable, ", ".join([fk.column.name for fk in col.foreign_keys])] + for name, col in self.table_metadata().columns.items() + ]) def get_table_config(self, table_name: str) -> dict[str, any]: ts = self.config.get("tables", None) if type(ts) is not dict: @@ -157,7 +162,7 @@ class TableCmdTableEntry(TableEntry): new_type: TableType class TableCmd(DbCmd): - intro = "Interactive table configuration (ignore, vocabulary or private). Type ? for help.\n" + intro = "Interactive table configuration (ignore, vocabulary, private or normal). Type ? for help.\n" prompt = "(tableconf) " file = None @@ -273,11 +278,11 @@ def do_private(self, _arg): def do_normal(self, _arg): "Set the current table as neither a vocabulary table nor ignored nor primary private, and go to the next table" self.set_type(TableType.NORMAL) - self.print("Table {} reset", self.table_name()) + self.print("Table {} normal", self.table_name()) self.next_table() def do_columns(self, _arg): - "Report the column names" - self.columnize(self.table_metadata().columns.keys()) + "Report the column names and metadata" + self.report_columns() def do_data(self, arg: str): """ Report some data. 
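# Illustration only, not part of the patch: report_columns prints one row of
# metadata per column via print_table (the interactive commands use prettytable
# for tabular output). For the unique_constraint_test table used in the tests
# below, the report looks roughly like this sketch:
from prettytable import PrettyTable

_report = PrettyTable()
_report.field_names = ["name", "type", "primary", "nullable", "foreign key"]
_report.add_row(["id", "INTEGER", True, False, ""])
_report.add_row(["a", "BOOLEAN", False, False, ""])
print(_report)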
@@ -873,8 +878,28 @@ def do_list(self, arg): self.print("{0}{1}{2} {3}", old, becomes, primary, gen.column) def do_columns(self, _arg): - "Report the column names" - self.columnize(self.table_metadata().columns.keys()) + "Report the column names and metadata" + self.report_columns() + + def do_info(self, _arg): + "Show information about the current column" + cm = self.column_metadata() + if cm is None: + return + self.print( + "Column {0} in table {1} has type {2} ({3}).", + cm.name, + cm.table.name, + str(cm.type), + "nullable" if cm.nullable else "not nullable", + ) + if cm.primary_key: + self.print("It is a primary key, which usually does not need a generator") + elif cm.foreign_keys: + fk_names = [fk.column.name for fk in cm.foreign_keys] + self.print("It is a foreign key referencing table {0}", ", ".join(fk_names)) + if len(fk_names) == 1: + self.print("You do not need a generator if you just want a uniform choice over the referenced table's rows") def _get_table_index(self, table_name: str) -> int | None: for n, entry in enumerate(self.table_entries): @@ -912,6 +937,9 @@ def do_next(self, arg): self.generator_index = gen_index self.set_prompt() return + self._go_next() + + def _go_next(self): table = self.get_table() if table is None: self.print("No more tables") @@ -1150,6 +1178,7 @@ def do_set(self, arg: str): self.print("Error: no column") return gen_info.new_gen = gens[index - 1] + self._go_next() def update_config_generators(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): From 97d3e349aad755eabd1897ff0735866a14c73856 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 16 May 2025 15:40:20 +0100 Subject: [PATCH 73/85] configure-generators propose on all-null column no longer fails --- sqlsynthgen/generators.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 94aade82..7ef063c0 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -9,6 +9,7 @@ import math import mimesis import mimesis.locales +import psycopg2 import re from sqlalchemy import Column, Engine, text from sqlalchemy.types import Date, DateTime, Integer, Numeric, String, Time @@ -229,9 +230,21 @@ def make_buckets(_cls, engine: Engine, table_name: str, column_name: str): column=column_name, )) ).first() - if result is None or result.stddev is None: + if result is None or result.stddev is None or result.count == 0: return None - return Buckets(engine, table_name, column_name, result.mean, result.stddev, result.count) + try: + buckets = Buckets( + engine, + table_name, + column_name, + result.mean, + result.stddev, + result.count + ) + except psycopg2.errors.DatabaseError as e: + logger.debug("Failed to instantiate Buckets object %s", e) + return None + return buckets def fit_from_counts(self, bucket_counts: list[float]) -> float: """ From 2ec619226b8234278b9d69765dfbeb3dd54b17a3 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 16 May 2025 16:25:09 +0100 Subject: [PATCH 74/85] EMPTY (num_passes: 0) table type in interactive commands --- sqlsynthgen/interactive.py | 27 ++++++++++++++++++++++++++- tests/test_interactive.py | 37 ++++++++++++++++++++++++++++++++++++- 2 files changed, 62 insertions(+), 2 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index ca35d0ef..d45ed6ec 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -21,12 +21,14 @@ class TableType(Enum): IGNORE = "ignore" VOCABULARY = "vocabulary" PRIVATE = "private" + 
EMPTY = "empty" TYPE_LETTER = { TableType.NORMAL: " ", TableType.IGNORE: "I", TableType.VOCABULARY: "V", TableType.PRIVATE: "P", + TableType.EMPTY: "e", } TYPE_PROMPT = { @@ -34,6 +36,7 @@ class TableType(Enum): TableType.IGNORE: "(table: {} (ignore)) ", TableType.VOCABULARY: "(table: {} (vocab)) ", TableType.PRIVATE: "(table: {} (private)) ", + TableType.EMPTY: "(table: {} (empty))", } @dataclass @@ -162,7 +165,7 @@ class TableCmdTableEntry(TableEntry): new_type: TableType class TableCmd(DbCmd): - intro = "Interactive table configuration (ignore, vocabulary, private or normal). Type ? for help.\n" + intro = "Interactive table configuration (ignore, vocabulary, private, normal or empty). Type ? for help.\n" prompt = "(tableconf) " file = None @@ -173,6 +176,8 @@ def make_table_entry(self, name: str, table: Mapping) -> TableEntry: return TableCmdTableEntry(name, TableType.VOCABULARY, TableType.VOCABULARY) if table.get("primary_private", False): return TableCmdTableEntry(name, TableType.PRIVATE, TableType.PRIVATE) + if table.get("num_passes", 1) == 0: + return TableCmdTableEntry(name, TableType.EMPTY, TableType.EMPTY) return TableCmdTableEntry(name, TableType.NORMAL, TableType.NORMAL) def __init__(self, src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): @@ -193,6 +198,8 @@ def _copy_entries(self) -> None: for entry in self.table_entries: if entry.old_type != entry.new_type: table = self.get_table_config(entry.name) + if entry.old_type == TableType.EMPTY and table.get("num_passes", 1) == 0: + table["num_passes"] = 1 if entry.new_type == TableType.IGNORE: table["ignore"] = True table.pop("vocabulary_table", None) @@ -205,6 +212,11 @@ def _copy_entries(self) -> None: table.pop("ignore", None) table.pop("vocabulary_table", None) table["primary_private"] = True + elif entry.new_type == TableType.EMPTY: + table.pop("ignore", None) + table.pop("vocabulary_table", None) + table.pop("primary_private", None) + table["num_passes"] = 0 else: table.pop("ignore", None) table.pop("vocabulary_table", None) @@ -280,6 +292,11 @@ def do_normal(self, _arg): self.set_type(TableType.NORMAL) self.print("Table {} normal", self.table_name()) self.next_table() + def do_empty(self, _arg): + "Set the current table as empty; no generators will be run for it" + self.set_type(TableType.EMPTY) + self.print("Table {} empty", self.table_name()) + self.next_table() def do_columns(self, _arg): "Report the column names and metadata" self.report_columns() @@ -430,6 +447,12 @@ def find_missingness_query(self, missingness_generator: Mapping): return src_stat.get("query", None) return None def make_table_entry(self, name: str, table: Mapping) -> TableEntry: + if table.get("ignore", False): + return None + if table.get("vocabulary_table", False): + return None + if table.get("num_passes", 1) == 0: + return None mgs = table.get("missingness_generators", []) old = None nonnull_columns = self.get_nonnull_columns(name) @@ -661,6 +684,8 @@ def make_table_entry(self, table_name: str, table: Mapping) -> TableEntry | None return None if table.get("vocabulary_table", False): return None + if table.get("num_passes", 1) == 0: + return None metadata_table = self.metadata.tables[table_name] columns = frozenset(metadata_table.columns.keys()) col2gen: dict[str, Generator] = {} diff --git a/tests/test_interactive.py b/tests/test_interactive.py index 16d9976f..e13e52b5 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -86,7 +86,15 @@ def test_column_display(self) -> None: with TestTableCmd(self.dsn, 
self.schema_name, metadata, config) as tc: tc.do_next("unique_constraint_test") tc.do_columns("") - self.assertListEqual(tc.column_items, [["id", "a", "b", "c"]]) + self.assertListEqual( + tc.rows, + [ + ["id", "INTEGER", True, False, ""], + ["a", "BOOLEAN", False, False, ""], + ["b", "BOOLEAN", False, False, ""], + ["c", "TEXT", False, False, ""], + ], + ) def test_null_configuration(self) -> None: """A table still works if its configuration is None.""" @@ -134,6 +142,12 @@ def test_configure_tables(self) -> None: "no_pk_test": { "ignore": True, }, + "hospital_visit": { + "num_passes": 0, + }, + "empty_vocabulary": { + "private": True, + } }, } with TestTableCmd(self.dsn, self.schema_name, metadata, config) as tc: @@ -143,16 +157,37 @@ def test_configure_tables(self) -> None: tc.do_vocabulary("") tc.do_next("mitigation_type") tc.do_ignore("") + tc.do_next("hospital_visit") + tc.do_private("") + tc.do_quit("") + tc.do_next("empty_vocabulary") + tc.do_empty("") tc.do_quit("") tables = tc.config["tables"] self.assertFalse(tables["unique_constraint_test"].get("vocabulary_table", False)) self.assertFalse(tables["unique_constraint_test"].get("ignore", False)) + self.assertFalse(tables["unique_constraint_test"].get("primary_private", False)) + self.assertEqual(tables["unique_constraint_test"].get("num_passes", 1), 1) self.assertFalse(tables["no_pk_test"].get("vocabulary_table", False)) self.assertTrue(tables["no_pk_test"].get("ignore", False)) + self.assertFalse(tables["no_pk_test"].get("primary_private", False)) + self.assertEqual(tables["no_pk_test"].get("num_passes", 1), 1) self.assertTrue(tables["person"].get("vocabulary_table", False)) self.assertFalse(tables["person"].get("ignore", False)) + self.assertFalse(tables["person"].get("primary_private", False)) + self.assertEqual(tables["person"].get("num_passes", 1), 1) self.assertFalse(tables["mitigation_type"].get("vocabulary_table", False)) self.assertTrue(tables["mitigation_type"].get("ignore", False)) + self.assertFalse(tables["mitigation_type"].get("primary_private", False)) + self.assertEqual(tables["mitigation_type"].get("num_passes", 1), 1) + self.assertFalse(tables["hospital_visit"].get("vocabulary_table", False)) + self.assertFalse(tables["hospital_visit"].get("ignore", False)) + self.assertTrue(tables["hospital_visit"].get("primary_private", False)) + self.assertEqual(tables["hospital_visit"].get("num_passes", 1), 1) + self.assertFalse(tables["empty_vocabulary"].get("vocabulary_table", False)) + self.assertFalse(tables["empty_vocabulary"].get("ignore", False)) + self.assertFalse(tables["empty_vocabulary"].get("primary_private", False)) + self.assertEqual(tables["empty_vocabulary"].get("num_passes", 1), 0) def test_print_data(self) -> None: """Test that we can print random rows from the table and random data from columns.""" From 24eeb1be6da8611b6c19ebf862285c1f05310008 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 16 May 2025 17:39:44 +0100 Subject: [PATCH 75/85] datetime generator creation fixed --- sqlsynthgen/generators.py | 42 +++++++++++++++++++++++++++++---------- 1 file changed, 31 insertions(+), 11 deletions(-) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 7ef063c0..90238965 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -346,21 +346,41 @@ def fit(self, default=None): class MimesisDateTimeGenerator(MimesisGeneratorBase): - def __init__(self, column: Column, engine: Engine, function_name: str): + def __init__(self, column: Column, function_name: str, min_year: str, 
max_year: str, start: int, end: int): + """ + :param column: The column to generate into + :param function_name: The name of the mimesis function + :param min_year: SQL expression extracting the minimum year + :param min_year: SQL expression extracting the maximum year + :param start: The actual first year found + :param end: The actual last year found + """ super().__init__(function_name) self._column = column - self._function_name = function_name - self._extract_year = f"EXTRACT(YEAR FROM {column.name})" - self._max_year = f"MAX({self._extract_year})" - self._min_year = f"MIN({self._extract_year})" + self._max_year = max_year + self._min_year = min_year + self._start = start + self._end = end + + @classmethod + def make_singleton(_cls, column: Column, engine: Engine, function_name: str): + extract_year = f"EXTRACT(YEAR FROM {column.name})" + max_year = f"MAX({extract_year})" + min_year = f"MIN({extract_year})" with engine.connect() as connection: result = connection.execute( - text(f"SELECT {self._min_year} AS start, {self._max_year} AS end FROM {column.table.name}") + text(f"SELECT {min_year} AS start, {max_year} AS end FROM {column.table.name}") ).first() if result is None or result.start is None or result.end is None: - return None - self._start = int(result.start) - self._end = int(result.end) + return [] + return [MimesisDateTimeGenerator( + column, + function_name, + min_year, + max_year, + int(result.start), + int(result.end), + )] def nominal_kwargs(self): return { "start": f'SRC_STATS["auto__{self._column.table.name}"]["{self._column.name}__start"]', @@ -458,7 +478,7 @@ def get_generators(self, column: Column, engine: Engine): ct = column.type.as_generic() if not isinstance(ct, Date): return [] - return [MimesisDateTimeGenerator(column, engine, "datetime.date")] + return MimesisDateTimeGenerator.make_singleton(column, engine, "datetime.date") class MimesisDateTimeGeneratorFactory(GeneratorFactory): @@ -469,7 +489,7 @@ def get_generators(self, column: Column, engine: Engine): ct = column.type.as_generic() if not isinstance(ct, DateTime): return [] - return [MimesisDateTimeGenerator(column, engine, "datetime.datetime")] + return MimesisDateTimeGenerator.make_singleton(column, engine, "datetime.datetime") class MimesisTimeGeneratorFactory(GeneratorFactory): From 8e43d43b5194b64c95cc8b61479dd2d2190e9287 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 16 May 2025 18:16:32 +0100 Subject: [PATCH 76/85] NULL generator --- sqlsynthgen/base.py | 3 +++ sqlsynthgen/generators.py | 27 +++++++++++++++++++++++++-- sqlsynthgen/interactive.py | 6 +++--- 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index 4e947cfe..969d5525 100644 --- a/sqlsynthgen/base.py +++ b/sqlsynthgen/base.py @@ -53,6 +53,9 @@ def choice(self, a): def zipf_choice(self, a, n): return self.rng.choice(a, p = zipf_weights(n)) + def constant(self, value): + return value + class TableGenerator(ABC): """Abstract base class for table generator classes.""" diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 90238965..6e7a7da8 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -669,8 +669,7 @@ class ChoiceGeneratorFactory(GeneratorFactory): """ All generators that want an average and standard deviation. 
""" - def get_generators(self, column, engine: Engine): - ct = column.type.as_generic() + def get_generators(self, column: Column, engine: Engine): column_name = column.name table_name = column.table.name with engine.connect() as connection: @@ -703,6 +702,29 @@ def get_generators(self, column, engine: Engine): ] +class NullGenerator(Generator): + def __init__(self): + super().__init__() + def function_name(self) -> str: + return "dist_gen.constant" + def nominal_kwargs(self) -> dict[str, str]: + return {"value": "None"} + def actual_kwargs(self) -> dict[str, any]: + return {"value": None} + def generate_data(self, count) -> list[any]: + return [None for _ in range(count)] + + +class ConstantGeneratorFactory(GeneratorFactory): + """ + Just the null generator + """ + def get_generators(self, column: Column, _engine: Engine): + if column.nullable: + return [NullGenerator()] + return [] + + everything_factory = MultiGeneratorFactory([ MimesisStringGeneratorFactory(), MimesisIntegerGeneratorFactory(), @@ -712,4 +734,5 @@ def get_generators(self, column, engine: Engine): MimesisTimeGeneratorFactory(), ContinuousDistributionGeneratorFactory(), ChoiceGeneratorFactory(), + ConstantGeneratorFactory(), ]) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index d45ed6ec..327c7d06 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -350,7 +350,7 @@ def complete_data(self, text, line, begidx, endidx): ] def print_column_data(self, column: str, count: int, min_length: int): - where = "" + where = f"WHERE {column} IS NOT NULL" if 0 < min_length: where = "WHERE LENGTH({column}) >= {len}".format( column=column, @@ -1134,7 +1134,7 @@ def _print_select_aggregate_query(self, table_name, gen: Generator) -> None: def _get_column_data(self, count: int, to_str=repr, min_length: int = 0): column = str(self.get_column_name()) - where = "" + where = f"WHERE {column} IS NOT NULL" if 0 < min_length: where = "WHERE LENGTH({column}) >= {len}".format( column=column, @@ -1151,7 +1151,7 @@ def _get_column_data(self, count: int, to_str=repr, min_length: int = 0): ) return [to_str(x[0]) for x in result.all()] - def do_propose(self, arg): + def do_propose(self, _arg): """ Display a list of possible generators for this column. 
From 7c08c55436d690a4f27aa4aebeaa0f6b809a1892 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 16 May 2025 22:38:13 +0100 Subject: [PATCH 77/85] #20 configure-generators unset command --- sqlsynthgen/generators.py | 7 ++++--- sqlsynthgen/interactive.py | 21 ++++++++++++++++++++- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index 6e7a7da8..eb978710 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -11,6 +11,7 @@ import mimesis.locales import psycopg2 import re +import sqlalchemy from sqlalchemy import Column, Engine, text from sqlalchemy.types import Date, DateTime, Integer, Numeric, String, Time from typing import Callable @@ -239,10 +240,10 @@ def make_buckets(_cls, engine: Engine, table_name: str, column_name: str): column_name, result.mean, result.stddev, - result.count + result.count, ) - except psycopg2.errors.DatabaseError as e: - logger.debug("Failed to instantiate Buckets object %s", e) + except sqlalchemy.exc.ProgrammingError as exc: + logger.debug("Failed to instantiate Buckets object: %s", exc) return None return buckets diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 327c7d06..8accc688 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -1006,8 +1006,11 @@ def do_previous(self, _arg): self.generator_index -= 1 self.set_prompt() + def _generators_valid(self) -> bool: + return self.generators_valid_indices == (self.table_index, self.generator_index) + def _get_generator_proposals(self) -> list[Generator]: - if self.generators_valid_indices != (self.table_index, self.generator_index): + if not self._generators_valid(): self.generators = None if self.generators is None: column = self.column_metadata() @@ -1187,6 +1190,9 @@ def do_set(self, arg: str): if not arg.isdigit(): self.print("set requires a single integer argument; 'set 3' sets the third generator that 'propose' lists.") return + if not self._generators_valid(): + self.print("Please run 'propose' before 'set'") + return gens = self._get_generator_proposals() index = int(arg) if index < 1: @@ -1205,6 +1211,19 @@ def do_set(self, arg: str): gen_info.new_gen = gens[index - 1] self._go_next() + def do_unset(self, _arg): + """ + Removes any generator set for this column. + """ + (table, gen_info) = self.get_table_and_generator() + if table is None: + self.print("Error: no table") + return + if gen_info is None: + self.print("Error: no column") + return + gen_info.new_gen = None + self._go_next() def update_config_generators(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): with GeneratorCmd(src_dsn, src_schema, metadata, config) as gc: From 5aa3faa5e1889890c73bbe5057d71920ed24ca0b Mon Sep 17 00:00:00 2001 From: Tim Band Date: Fri, 16 May 2025 23:04:34 +0100 Subject: [PATCH 78/85] #21 configure-generators: better text for propose if no source data --- sqlsynthgen/interactive.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 8accc688..520c1a19 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -674,6 +674,7 @@ class GeneratorCmd(DbCmd): file = None PROPOSE_SOURCE_SAMPLE_TEXT = "Sample of actual source data: {0}..." + PROPOSE_SOURCE_EMPTY_TEXT = "Source database has no data in this column." PROPOSE_GENERATOR_SAMPLE_TEXT = "{index}. {name}: {fit} {sample} ..." 
PRIMARY_PRIVATE_TEXT = "Primary Private" SECONDARY_PRIVATE_TEXT = "Secondary Private on columns {0}" @@ -1165,7 +1166,10 @@ def do_propose(self, _arg): limit = 5 gens = self._get_generator_proposals() sample = self._get_column_data(limit) - self.print(self.PROPOSE_SOURCE_SAMPLE_TEXT, ",".join(sample)) + if sample: + self.print(self.PROPOSE_SOURCE_SAMPLE_TEXT, ",".join(sample)) + else: + self.print(self.PROPOSE_SOURCE_EMPTY_TEXT) for index, gen in enumerate(gens): fit = gen.fit() if fit is None: From 4533b4c57e5f59b615e2dd1c802821dc6a58d0f1 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Sat, 17 May 2025 23:33:43 +0100 Subject: [PATCH 79/85] Some fixes --- sqlsynthgen/generators.py | 9 +++++---- sqlsynthgen/interactive.py | 26 +++++++++++++++++++++----- 2 files changed, 26 insertions(+), 9 deletions(-) diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index eb978710..a4cc048e 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -231,7 +231,7 @@ def make_buckets(_cls, engine: Engine, table_name: str, column_name: str): column=column_name, )) ).first() - if result is None or result.stddev is None or result.count == 0: + if result is None or result.stddev is None or result.count < 2: return None try: buckets = Buckets( @@ -242,7 +242,7 @@ def make_buckets(_cls, engine: Engine, table_name: str, column_name: str): result.stddev, result.count, ) - except sqlalchemy.exc.ProgrammingError as exc: + except sqlalchemy.exc.DatabaseError as exc: logger.debug("Failed to instantiate Buckets object: %s", exc) return None return buckets @@ -258,7 +258,7 @@ def fit_from_values(self, values: list[float]) -> float: Figure out the fit from samples from the generator distribution. """ buckets = [0] * 10 - x=self.mean - 2 * self.stddev + x = self.mean - 2 * self.stddev w = self.stddev / 2 for v in values: b = min(9, max(0, int((v - x)/w))) @@ -517,7 +517,8 @@ def get_generators(self, column: Column, _engine: Engine): def fit_from_buckets(xs: list[float], ys: list[float]): sum_diff_squared = sum(map(lambda t, a: (t - a)*(t - a), xs, ys)) - return sum_diff_squared / len(ys) + count = len(ys) + return sum_diff_squared / (count * count) class ContinuousDistributionGenerator(Generator): diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 520c1a19..5da8d137 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -166,6 +166,9 @@ class TableCmdTableEntry(TableEntry): class TableCmd(DbCmd): intro = "Interactive table configuration (ignore, vocabulary, private, normal or empty). Type ? for help.\n" + doc_leader = """Use the commands ignore, vocabulary, private, empty or normal to set the table's type. +Use next or previous to change table. Use list and columns for information about the database. +Use data to see some data contained in the current table. 
Use quit to exit this program.""" prompt = "(tableconf) " file = None @@ -176,7 +179,7 @@ def make_table_entry(self, name: str, table: Mapping) -> TableEntry: return TableCmdTableEntry(name, TableType.VOCABULARY, TableType.VOCABULARY) if table.get("primary_private", False): return TableCmdTableEntry(name, TableType.PRIVATE, TableType.PRIVATE) - if table.get("num_passes", 1) == 0: + if table.get("num_rows_per_pass", 1) == 0: return TableCmdTableEntry(name, TableType.EMPTY, TableType.EMPTY) return TableCmdTableEntry(name, TableType.NORMAL, TableType.NORMAL) @@ -198,8 +201,8 @@ def _copy_entries(self) -> None: for entry in self.table_entries: if entry.old_type != entry.new_type: table = self.get_table_config(entry.name) - if entry.old_type == TableType.EMPTY and table.get("num_passes", 1) == 0: - table["num_passes"] = 1 + if entry.old_type == TableType.EMPTY and table.get("num_rows_per_pass", 1) == 0: + table["num_rows_per_pass"] = 1 if entry.new_type == TableType.IGNORE: table["ignore"] = True table.pop("vocabulary_table", None) @@ -216,7 +219,7 @@ def _copy_entries(self) -> None: table.pop("ignore", None) table.pop("vocabulary_table", None) table.pop("primary_private", None) - table["num_passes"] = 0 + table["num_rows_per_pass"] = 0 else: table.pop("ignore", None) table.pop("vocabulary_table", None) @@ -393,7 +396,7 @@ class MissingnessType: SAMPLED_QUERY=( "SELECT COUNT(*) AS row_count, {result_names} FROM " "(SELECT {column_is_nulls} FROM {table} ORDER BY RANDOM() LIMIT {count})" - " GROUP BY {result_names}" + " AS __t GROUP BY {result_names}" ) name: str query: str @@ -424,6 +427,11 @@ class MissingnessCmdTableEntry(TableEntry): class MissingnessCmd(DbCmd): intro = "Interactive missingness configuration. Type ? for help.\n" + doc_leader = """Use commands sampled and none to choose the +missingness style for the current table. Use commands next and +previous to change the current table. Use list to list the tables and +count to show how many NULLs exist in each column. Use quit +to exit this tool.""" prompt = "(missingness) " file = None ROW_COUNT_MSG = "Total row count: {}" @@ -670,6 +678,14 @@ class GeneratorCmdTableEntry(TableEntry): class GeneratorCmd(DbCmd): intro = "Interactive generator configuration. Type ? for help.\n" + doc_leader = """Use command 'propose' for a list of generators applicable to the current +column, then command 'compare' to see how these perform against the +source data, then command 'set' to choose your favourite. Use 'unset' +to remove the column's generator. Use commands 'next' and +'previous' to change which column we are examining. Use 'info' +for useful information about the current column. Use 'tables' and +'list' to see available tables and columns. 
Use 'columns' to see +information about the columns in the current table.'""" prompt = "(generatorconf) " file = None From 241e1413e5d13106f17afbbb41ae43a90bf6fe74 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Sun, 18 May 2025 09:34:26 +0100 Subject: [PATCH 80/85] #25 interactive peek, select and count test fixes --- sqlsynthgen/interactive.py | 161 +++++++++++++++++++++++++------------ tests/test_interactive.py | 14 ++-- 2 files changed, 118 insertions(+), 57 deletions(-) diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 5da8d137..6b932b9a 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -6,6 +6,7 @@ import logging from prettytable import PrettyTable import re +import sqlalchemy from sqlalchemy import Column, MetaData, Table, text from sqlsynthgen.generators import everything_factory, Generator, PredefinedGenerator @@ -66,6 +67,7 @@ class DbCmd(ABC, cmd.Cmd): ERROR_NO_MORE_TABLES = "Error: There are no more tables" ERROR_ALREADY_AT_START = "Error: Already at the start" ERROR_NO_SUCH_TABLE = "Error: '{0}' is not the name of a table in this database" + ROW_COUNT_MSG = "Total row count: {}" @abstractmethod def make_table_entry(self, name: str, table_config: Mapping) -> TableEntry: @@ -157,6 +159,95 @@ def _remove_prefix_src_stats(self, prefix: str) -> list[dict[str, any]]: new_src_stats.append(stat) self.config["src-stats"] = new_src_stats return new_src_stats + def get_nonnull_columns(self, table_name: str): + metadata_table = self.metadata.tables[table_name] + return [ + str(name) + for name, column in metadata_table.columns.items() + if column.nullable + ] + + def do_counts(self, _arg): + "Report the column names with the counts of nulls in them" + if len(self.table_entries) <= self.table_index: + return + table_name = self.table_name() + nonnull_columns = self.get_nonnull_columns(table_name) + colcounts = [ + ", COUNT({0}) AS {0}".format(nnc) + for nnc in nonnull_columns + ] + with self.engine.connect() as connection: + result = connection.execute( + text("SELECT COUNT(*) AS row_count{colcounts} FROM {table}".format( + table=table_name, + colcounts="".join(colcounts), + )) + ).first() + if result is None: + self.print("Could not count rows in table {0}", table_name) + return + row_count = result.row_count + self.print(self.ROW_COUNT_MSG, row_count) + self.print_table(["Column", "NULL count"], [ + [name, row_count - count] + for name, count in result._mapping.items() + if name != "row_count" + ]) + + def do_select(self, arg): + "Run a select query over the database and show the first 50 results" + MAX_SELECT_ROWS = 50 + with self.engine.connect() as connection: + try: + result = connection.execute( + text("SELECT " + arg) + ) + except sqlalchemy.exc.DatabaseError as exc: + self.print("Failed to execute: {}", exc) + return + row_count = result.rowcount + self.print(self.ROW_COUNT_MSG, row_count) + if 50 < row_count: + self.print("Showing the first {} rows", MAX_SELECT_ROWS) + fields = list(result.keys()) + rows = [ + row.tuple() + for row in result.fetchmany(MAX_SELECT_ROWS) + ] + self.print_table(fields, rows) + + def do_peek(self, arg: str): + """Use 'peek col1 col2 col3' to see a sample of values from columns col1, col2 and col3 in the current table.""" + MAX_PEEK_ROWS = 25 + if len(self.table_entries) <= self.table_index: + return + table_name = self.table_name() + col_names = arg.split() + nonnulls = [cn + " IS NOT NULL" for cn in col_names] + with self.engine.connect() as connection: + result = connection.execute( + text("SELECT 
{cols} FROM {table} WHERE {nonnull} LIMIT {max}".format( + cols=",".join(col_names), + table=table_name, + nonnull=" AND ".join(nonnulls), + max=MAX_PEEK_ROWS, + )) + ) + rows = [ + row.tuple() + for row in result.fetchmany(MAX_PEEK_ROWS) + ] + self.print_table(list(result.keys()), rows) + + def complete_peek(self, text: str, _line: str, _begidx: int, _endidx: int): + if len(self.table_entries) <= self.table_index: + return [] + return [ + col + for col in self.table_metadata().columns.keys() + if col.startswith(text) + ] @dataclass @@ -166,9 +257,12 @@ class TableCmdTableEntry(TableEntry): class TableCmd(DbCmd): intro = "Interactive table configuration (ignore, vocabulary, private, normal or empty). Type ? for help.\n" - doc_leader = """Use the commands ignore, vocabulary, private, empty or normal to set the table's type. -Use next or previous to change table. Use list and columns for information about the database. -Use data to see some data contained in the current table. Use quit to exit this program.""" + doc_leader = """Use the commands 'ignore', 'vocabulary', +'private', 'empty' or 'normal' to set the table's type. Use 'next' or +'previous' to change table. Use 'tables' and 'columns' for +information about the database. Use 'data', 'peek', 'select' or +'count' to see some data contained in the current table. Use 'quit' +to exit this program.""" prompt = "(tableconf) " file = None @@ -247,7 +341,7 @@ def do_quit(self, _arg): if reply == "no": return True return False - def do_list(self, arg): + def do_tables(self, arg): "list the tables with their types" for entry in self.table_entries: old = entry.old_type @@ -427,23 +521,15 @@ class MissingnessCmdTableEntry(TableEntry): class MissingnessCmd(DbCmd): intro = "Interactive missingness configuration. Type ? for help.\n" - doc_leader = """Use commands sampled and none to choose the -missingness style for the current table. Use commands next and -previous to change the current table. Use list to list the tables and -count to show how many NULLs exist in each column. Use quit -to exit this tool.""" + doc_leader = """Use commands 'sampled' and 'none' to choose the missingness style for +the current table. Use commands 'next' and 'previous' to change the +current table. Use 'tables' to list the tables and 'count' to show +how many NULLs exist in each column. Use 'peek' or 'select' to see +data from the database. 
Use 'quit' to exit this tool.""" prompt = "(missingness) " file = None - ROW_COUNT_MSG = "Total row count: {}" PATTERN_RE = re.compile(r'SRC_STATS\["([^"]*)"\]') - def get_nonnull_columns(self, table_name: str): - metadata_table = self.metadata.tables[table_name] - return [ - str(name) - for name, column in metadata_table.columns.items() - if column.nullable - ] def find_missingness_query(self, missingness_generator: Mapping): kwargs = missingness_generator.get("kwargs", {}) patterns = kwargs.get("patterns", "") @@ -555,7 +641,7 @@ def do_quit(self, _arg): if reply == "no": return True return False - def do_list(self, arg): + def do_tables(self, arg): "list the tables with their types" for entry in self.table_entries: old = "-" if entry.old_type is None else entry.old_type.name @@ -630,33 +716,6 @@ def do_none(self, _arg): self._set_none() self.print("Table {} set to have no missingness", self.table_name()) self.next_table() - def do_counts(self, _arg): - "Report the column names with the counts of nulls in them" - if len(self.table_entries) <= self.table_index: - return - table_name = self.table_entries[self.table_index].name - nonnull_columns = self.get_nonnull_columns(table_name) - colcounts = [ - ", COUNT({0}) AS {0}".format(nnc) - for nnc in nonnull_columns - ] - with self.engine.connect() as connection: - result = connection.execute( - text("SELECT COUNT(*) AS row_count{colcounts} FROM {table}".format( - table=table_name, - colcounts="".join(colcounts), - )) - ).first() - if result is None: - self.print("Could not count rows in table {0}", table_name) - return - row_count = result.row_count - self.print(self.ROW_COUNT_MSG, row_count) - self.print_table(["Column", "NULL count"], [ - [name, row_count - count] - for name, count in result._mapping.items() - if name != "row_count" - ]) def update_missingness(src_dsn: str, src_schema: str, metadata: MetaData, config: Mapping): @@ -678,14 +737,16 @@ class GeneratorCmdTableEntry(TableEntry): class GeneratorCmd(DbCmd): intro = "Interactive generator configuration. Type ? for help.\n" - doc_leader = """Use command 'propose' for a list of generators applicable to the current -column, then command 'compare' to see how these perform against the -source data, then command 'set' to choose your favourite. Use 'unset' -to remove the column's generator. Use commands 'next' and + doc_leader = """Use command 'propose' for a list of generators applicable to the +current column, then command 'compare' to see how these perform +against the source data, then command 'set' to choose your favourite. +Use 'unset' to remove the column's generator. Use commands 'next' and 'previous' to change which column we are examining. Use 'info' for useful information about the current column. Use 'tables' and 'list' to see available tables and columns. Use 'columns' to see -information about the columns in the current table.'""" +information about the columns in the current table. Use 'peek', +'count' or 'select' to fetch data from the source database. 
Use +'quit' to exit this program.""" prompt = "(generatorconf) " file = None diff --git a/tests/test_interactive.py b/tests/test_interactive.py index e13e52b5..b00a684c 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -171,23 +171,23 @@ def test_configure_tables(self) -> None: self.assertFalse(tables["no_pk_test"].get("vocabulary_table", False)) self.assertTrue(tables["no_pk_test"].get("ignore", False)) self.assertFalse(tables["no_pk_test"].get("primary_private", False)) - self.assertEqual(tables["no_pk_test"].get("num_passes", 1), 1) + self.assertEqual(tables["no_pk_test"].get("num_rows_per_pass", 1), 1) self.assertTrue(tables["person"].get("vocabulary_table", False)) self.assertFalse(tables["person"].get("ignore", False)) self.assertFalse(tables["person"].get("primary_private", False)) - self.assertEqual(tables["person"].get("num_passes", 1), 1) + self.assertEqual(tables["person"].get("num_rows_per_pass", 1), 1) self.assertFalse(tables["mitigation_type"].get("vocabulary_table", False)) self.assertTrue(tables["mitigation_type"].get("ignore", False)) self.assertFalse(tables["mitigation_type"].get("primary_private", False)) - self.assertEqual(tables["mitigation_type"].get("num_passes", 1), 1) + self.assertEqual(tables["mitigation_type"].get("num_rows_per_pass", 1), 1) self.assertFalse(tables["hospital_visit"].get("vocabulary_table", False)) self.assertFalse(tables["hospital_visit"].get("ignore", False)) self.assertTrue(tables["hospital_visit"].get("primary_private", False)) - self.assertEqual(tables["hospital_visit"].get("num_passes", 1), 1) + self.assertEqual(tables["hospital_visit"].get("num_rows_per_pass", 1), 1) self.assertFalse(tables["empty_vocabulary"].get("vocabulary_table", False)) self.assertFalse(tables["empty_vocabulary"].get("ignore", False)) self.assertFalse(tables["empty_vocabulary"].get("primary_private", False)) - self.assertEqual(tables["empty_vocabulary"].get("num_passes", 1), 0) + self.assertEqual(tables["empty_vocabulary"].get("num_rows_per_pass", 1), 0) def test_print_data(self) -> None: """Test that we can print random rows from the table and random data from columns.""" @@ -258,7 +258,7 @@ def test_list_tables(self): tc.do_next("person") tc.do_vocabulary("") tc.reset() - tc.do_list("") + tc.do_tables("") person_listed = False unique_constraint_test_listed = False no_pk_test_listed = False @@ -647,7 +647,7 @@ def test_set_missingness_to_sampled(self): "name": "missing_auto__signature_model__0", "query": ("SELECT COUNT(*) AS row_count, player_id__is_null, based_on__is_null FROM" " (SELECT player_id IS NULL AS player_id__is_null, based_on IS NULL AS based_on__is_null FROM" - " signature_model ORDER BY RANDOM() LIMIT 1000) GROUP BY player_id__is_null, based_on__is_null") + " signature_model ORDER BY RANDOM() LIMIT 1000) AS __t GROUP BY player_id__is_null, based_on__is_null") }] } ) From ea3df37c75a02f543433cbbee644af7c5538d139 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Sun, 18 May 2025 23:47:20 +0100 Subject: [PATCH 81/85] make-vocab gets --only --- sqlsynthgen/main.py | 2 ++ sqlsynthgen/make.py | 11 ++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 81fbd06a..6fc6eb2b 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -191,6 +191,7 @@ def make_vocab( config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), force: bool = Option(True, help="Overwrite any existing vocabulary file."), compress: bool = Option(False, help="Compress file to 
.gz"), + only: list[str] = Option([], help="Only download this table."), ) -> None: """Make files of vocabulary tables. @@ -209,6 +210,7 @@ def make_vocab( generator_config, overwrite_files=force, compress=compress, + table_names=set(only) if only else None, ) diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index 19f27769..a12799e3 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -508,6 +508,7 @@ def make_vocabulary_tables( config: Mapping, overwrite_files: bool, compress: bool, + table_names: set[str] | None=None, ): """ Extracts the data from the source database for each @@ -519,7 +520,15 @@ def make_vocabulary_tables( engine = get_sync_engine(create_db_engine(src_dsn, schema_name=settings.src_schema)) vocab_names = get_vocabulary_table_names(config) - for table_name in vocab_names: + if table_names is None: + table_names = vocab_names + else: + invalid_names = table_names - vocab_names + if invalid_names: + logger.error("The following names are not the names of vocabulary tables: %s", invalid_names) + logger.info("Valid names are: %s", vocab_names) + return + for table_name in table_names: _generate_vocabulary_table( metadata.tables[table_name], engine, From 49a1eb2eb6c3f26dc6602d2a2107c3d265198a57 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 20 May 2025 09:47:22 +0100 Subject: [PATCH 82/85] removed generate-configuration --- sqlsynthgen/main.py | 26 -------------------------- sqlsynthgen/make.py | 19 ------------------- 2 files changed, 45 deletions(-) diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index 6fc6eb2b..e0614777 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -23,7 +23,6 @@ make_table_generators, make_tables_file, make_vocabulary_tables, - generate_config_file, ) from sqlsynthgen.remove import remove_db_data, remove_db_tables, remove_db_vocab from sqlsynthgen.settings import Settings, get_settings @@ -310,31 +309,6 @@ def make_tables( logger.debug("%s created.", orm_file) -@app.command() -def generate_config( - config_file: Optional[str] = Option(CONFIG_FILENAME, help="Path to write the configuration file to"), - force: bool = Option(False, help="Overwrite any existing configuration yaml file"), -) -> None: - """ - Generate a basic configuration file. - - The configuration produced just includes default configuration for the - existing source database tables. 
- """ - logger.debug("Creating %s.", config_file) - - config_file_path = Path(config_file) - if not force: - _check_file_non_existence(config_file_path) - - settings = get_settings() - src_dsn: str = _require_src_db_dsn(settings) - - content = generate_config_file(src_dsn, settings.src_schema) - config_file_path.write_text(content, encoding="utf-8") - logger.debug("%s created.", config_file) - - @app.command() def configure_tables( config_file: Optional[str] = Option(CONFIG_FILENAME, help="Path to write the configuration file to"), diff --git a/sqlsynthgen/make.py b/sqlsynthgen/make.py index a12799e3..0ad55885 100644 --- a/sqlsynthgen/make.py +++ b/sqlsynthgen/make.py @@ -652,25 +652,6 @@ def _generate_vocabulary_table( download_table(table, engine, yaml_file_name, compress) -def generate_config_file( - db_dsn: str, schema_name: Optional[str] -) -> str: - engine = get_sync_engine(create_db_engine(db_dsn, schema_name=schema_name)) - metadata = MetaData() - metadata.reflect(engine) - tables = {} - for table_name in metadata.tables.keys(): - table = { - "ignore": False, - "vocabulary_table": False, - "unions": {}, - "num_rows_per_pass": 1, - "row_generators": [], - } - tables[table_name] = table - return yaml.dump({"tables": tables}) - - def make_tables_file( db_dsn: str, schema_name: Optional[str], config: Mapping[str, Any] ) -> str: From 4274b5344c3229f7b9d5f1510cef97cffb830937 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 20 May 2025 09:54:51 +0100 Subject: [PATCH 83/85] change make-generators to create-generators --- docs/source/health_data.rst | 8 ++++---- docs/source/introduction.rst | 26 +++++++++++++------------- docs/source/loan_data.rst | 10 +++++----- docs/source/quickstart.rst | 32 +++++++++++++++++++------------- sqlsynthgen/main.py | 4 ++-- tests/test_functional.py | 6 +++--- tests/test_main.py | 24 ++++++++++++------------ tests/test_make.py | 4 ++-- 8 files changed, 60 insertions(+), 54 deletions(-) diff --git a/docs/source/health_data.rst b/docs/source/health_data.rst index 1467572e..3f1c76e9 100644 --- a/docs/source/health_data.rst +++ b/docs/source/health_data.rst @@ -16,7 +16,7 @@ Before getting into the config itself, we need to discuss a few peculiarities of 1. Some versions of OMOP contain a circular foreign key, for instance between the ``vocabulary``, ``concept``, and ``domain`` tables. 2. There are several standardized vocabulary tables (``concept``, ``concept_relationship``, etc). These should be marked as such in the sqlsynthgen config file. - The tables will be exported to ``.yaml`` files during the ``make-generators`` step. + The tables will be exported to ``.yaml`` files during the ``create-generators`` step. However, some of these vocabulary tables may be too large to practically be writable to ``.yaml`` files, and will need to be dealt with manually. You should also check the license agreement of each standardized vocabulary before sharing any of the ``.yaml`` files. @@ -106,15 +106,15 @@ The usual way is to run .. code-block:: shell - sqlsynthgen make-generators --config-file=config.yaml + sqlsynthgen create-generators --config-file=config.yaml sqlsynthgen create-vocab --config-file=config.yaml -``make-generators`` downloads all the vocabulary tables to your local machine as YAML files and ``create-vocab`` uploads them to the target database. +``create-generators`` downloads all the vocabulary tables to your local machine as YAML files and ``create-vocab`` uploads them to the target database. 
In the CCHIC dataset we were looking at some of the vocabulary tables were several gigabytes, and downloading those as YAML files was a bad idea. Thus we rather set SSG to ignore those tables and copied them over from the source schema to the destination schema manually, which was easier to do (in our case the source and the destination were just different schemas within the same database). The ``ignore: true`` option can also be used to make SSG ignore tables that we are not interested in at all. -Note though that if one of the ignored tables is foreign key referenced by one of the tables we are `not` ignoring, the ignored table is still included in the ``orm.py`` and created by ``create-tables``, although ignored by ``make-generators`` and ``create-data``. +Note though that if one of the ignored tables is foreign key referenced by one of the tables we are `not` ignoring, the ignored table is still included in the ``orm.py`` and created by ``create-tables``, although ignored by ``create-generators`` and ``create-data``. This is necessary to not break the network of foreign key relations. It is also good, because it means that after we copy the big vocabulary tables over manually, all foreign key references and things like automatically generating default values for referencing columns work as usual. diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst index a4a62be3..86bad82e 100644 --- a/docs/source/introduction.rst +++ b/docs/source/introduction.rst @@ -62,12 +62,12 @@ And let's populate it with the fake data: export DST_DSN='postgresql://tim:password@localhost/fake_pagila' export DST_SCHEMA='public' - sqlsynthgen make-generators + sqlsynthgen create-generators sqlsynthgen create-tables sqlsynthgen create-data -``make-generators`` creates a Python file called ``ssg.py``. -You can edit this file if you want, but it is much easier to edit ``config.yaml`` and call ``sqlsynthgen make-generators --force`` to regenerate this file. +``create-generators`` creates a Python file called ``ssg.py``. +You can edit this file if you want, but it is much easier to edit ``config.yaml`` and call ``sqlsynthgen create-generators --force`` to regenerate this file. You will notice that ``create-tables`` produces a couple of warnings, and PostgreSQL complains when ``sqlsynthgen`` tries to create the data. The warnings are that ``sqlsynthgen`` doesn't understand the special PostgresSQL types ``TSVECTOR`` and ``ARRAY``, so it doesn't know how to generate data for those columns. @@ -121,10 +121,10 @@ Some of these functions take arguments, that we can assign like this: (but only static booleans, strings or numbers) -Anyway, we now need to remake the generators (``make-generators``) and re-run them (``create-data``): +Anyway, we now need to remake the generators (``create-generators``) and re-run them (``create-data``): .. code-block:: console - $ sqlsynthgen make-generators --force + $ sqlsynthgen create-generators --force $ sqlsynthgen create-data --num-passes 15 Now you can use ``psql --username tim fake_pagila`` to explore the data. @@ -295,7 +295,7 @@ If you haven't created the destination database, you may first need to run a com We can also use the ``orm.py`` file to make a Python module that generates synthetic data:: - $ sqlsynthgen make-generators + $ sqlsynthgen create-generators This creates an ``ssg.py`` file that contains one generator class (not to be confused with Python generator functions) per source database table. 
By default, without any user configuration, the data produced by these generators fulfills the schema of the original data: @@ -382,7 +382,7 @@ We identify ``countries`` as a vocabulary table in our ``config.yaml`` file: The vocabulary tables are exported from the source database when the generator module is made, so we overwrite ``ssg.py`` with one that includes the vocabulary import classes, using the ``--force`` option:: - $ sqlsynthgen make-generators --config-file config.yaml --force + $ sqlsynthgen create-generators --config-file config.yaml --force This will export the ``countries`` table rows to a file called ``countries.yaml`` in your current working directory: @@ -410,14 +410,14 @@ We need to truncate any tables in our destination database before importing the $ sqlsynthgen remove-data --config-file config.yaml $ sqlsynthgen create-vocab --config-file config.yaml --orm-file orm.yaml -Since ``make-generators`` rewrote ``ssg.py``, we must now re-edit it to add the primary key ``VARCHAR`` workarounds for the ``users`` and ``age_gender_bkts`` tables, as we did in section above. +Since ``create-generators`` rewrote ``ssg.py``, we must now re-edit it to add the primary key ``VARCHAR`` workarounds for the ``users`` and ``age_gender_bkts`` tables, as we did in section above. Once this is done, we can generate random data for the other three tables with:: $ sqlsynthgen create-data From now on, whenever we make a change to ``config.yaml``, we should re-run these steps to see the effects: -1. Run ``sqlsynthgen make-generators --config-file config.yaml --force``. +1. Run ``sqlsynthgen create-generators --config-file config.yaml --force``. 2. If necessary, perform any manual edits to ``ssg.py``. 3. Truncate the non-vocabulary database tables with ``sqlsynthgen remove-data --config-file config.yaml``. 4. Run ``sqlsynthgen create-data``. @@ -431,7 +431,7 @@ Note that one has to be careful in making sure that the tables marked as vocabul Specifying Row-based Custom Generators -------------------------------------- -As we’ve seen above, ``ssg.py`` is overwritten whenever you re-run ``make-generators``. +As we’ve seen above, ``ssg.py`` is overwritten whenever you re-run ``create-generators``. To avoid having to manually edit ``ssg.py`` after each overwrite, we can specify “row generators” for various columns in the config file: **config.yaml**: @@ -459,7 +459,7 @@ To avoid having to manually edit ``ssg.py`` after each overwrite, we can specify For instance, on lines 5-6 above we say that every time a row is generated for the ``agen_gender_bkts`` table, the ``generic.person.password`` function should be called (without arguments), and the output should be written to the ``gender`` column. We similarly use ``generic.person.password`` to populate ``age_gender_bkts.age_bucket`` and ``users.id``, and ``generic.column_value_provider.column_value`` (more on that one later) to populate ``country_destination``. -The next time we run ``make-generators``, these config-specified row generators will override the default ones and we will not need to edit the ``ssg.py`` manually any more. +The next time we run ``create-generators``, these config-specified row generators will override the default ones and we will not need to edit the ``ssg.py`` manually any more. You may notice in the above code block a few magical-seeming keywords, namely ``generic``, ``dst_db_conn``, and ``orm``, that deserve an explanation. 
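As a gloss on the row-generator mechanism described above, the sketch below shows roughly how a configured ``generic.person.password`` call for the ``gender`` column would be invoked at data-generation time. The class shape and names are illustrative assumptions, not generated ``ssg.py`` output.

.. code-block:: python

    # Illustrative sketch only: the class shape and names are assumptions,
    # not the actual code that sqlsynthgen writes into ssg.py.
    from mimesis import Generic

    generic = Generic()  # the "generic" object exposed to row generators


    class AgeGenderBktsGenerator:
        """One generator class per source table; this one fills ``gender``."""

        def __call__(self) -> dict:
            row = {}
            # The configured row generator is called without arguments and
            # its result is written to the assigned column.
            row["gender"] = generic.person.password()
            return row


    print(AgeGenderBktsGenerator()())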
@@ -623,7 +623,7 @@ which executes the query and writes the results to a ``src-stats.yaml`` file, wh This is the output of the SQL query in YAML format. To be able to use these numbers in our generators we need to regenerate ``ssg.py`` with :: - $ sqlsynthgen make-generators --config-file config.yaml --stats-file src-stats.yaml --force + $ sqlsynthgen create-generators --config-file config.yaml --stats-file src-stats.yaml --force The new option ``--stats-file src-stats.yaml`` makes it such that the ``SRC_STATS`` variable in ``ssg.py`` is populated with the concents of ``src-stats.yaml``, allowing you to pass them to your generators as arguments, as we do above in the ``config.yaml`` snippet on line 13. Note how the query name ``name: age_stats`` (line 2) is used in ``SRC_STATS["age_stats"]`` (line 13) to access the results of this particular query. @@ -814,7 +814,7 @@ To use and get the most from story generators, we will need to make some changes After editing the ``config.yaml`` and ``airbnb_generators.py`` as above, you can run: :: - $ sqlsynthgen make-generators --config-file=config.yaml --stats-file=src-stats.yaml --force + $ sqlsynthgen create-generators --config-file=config.yaml --stats-file=src-stats.yaml --force This will regenerate the ``ssg.py`` file to incorporate your story generator, and running ``create-data`` as usual will then create some storied users and sessions. diff --git a/docs/source/loan_data.rst b/docs/source/loan_data.rst index 4ccaad60..5f4b9da5 100644 --- a/docs/source/loan_data.rst +++ b/docs/source/loan_data.rst @@ -71,11 +71,11 @@ we see that they are always 0 or 1 so we will pick randomly from 0 and 1 for our .. literalinclude:: ../../examples/loans/config1.yaml :language: yaml -We run SqlSynthGen's ``make-generators`` command to create ``ssg.py``, which contains a generator class for each table in the source database: +We run SqlSynthGen's ``create-generators`` command to create ``ssg.py``, which contains a generator class for each table in the source database: .. code-block:: console - $ sqlsynthgen make-generators --config config.yaml + $ sqlsynthgen create-generators --config config.yaml We then run SqlSynthGen's ``create-tables`` command to create the tables in the destination database: @@ -108,7 +108,7 @@ We can export the vocabularies to ``.yaml`` files, delete the old synthetic data .. code-block:: console - $ sqlsynthgen make-generators + $ sqlsynthgen create-generators $ sqlsynthgen remove-data $ sqlsynthgen create-vocab $ sqlsynthgen create-data --num-passes 100 @@ -164,7 +164,7 @@ We'll need to recreate the ``ssg.py`` file, the destination database and the dat .. code-block:: console - $ sqlsynthgen make-generators --config-file config.yaml --force + $ sqlsynthgen create-generators --config-file config.yaml --force $ sqlsynthgen remove-tables --yes $ sqlsynthgen create-tables $ sqlsynthgen create-vocab @@ -216,7 +216,7 @@ As before, we will need to re-create ``ssg.py`` and the data. .. code-block:: console - $ sqlsynthgen make-generators --config-file config.yaml --force + $ sqlsynthgen create-generators --config-file config.yaml --force $ sqlsynthgen make-stats --config-file config.yaml --force $ sqlsynthgen remove-data --yes $ sqlsynthgen create-vocab diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst index 819340aa..a35f65f3 100644 --- a/docs/source/quickstart.rst +++ b/docs/source/quickstart.rst @@ -14,18 +14,24 @@ After :ref:`Installation `, we can run ``sqlsynthgen`` to see --help Show this message and exit. 
Commands: - create-data Populate schema with synthetic data. - create-tables Create schema from Python classes. - create-vocab Create tables using the SQLAlchemy file. - make-generators Make a SQLSynthGen file of generator classes. - make-stats Compute summary statistics from the source database. - make-tables Make a SQLAlchemy file of Table classes. - remove-data Truncate all non-vocabulary tables in the dst schema. - remove-tables Drop all tables in the dst schema. - remove-vocab Truncate all vocabulary tables in the dst schema. - validate-config Validate the format of a config file. - -For the simplest case, we will need ``make-tables``, ``make-generators``, ``create-tables`` and ``create-data`` but, first, + configure-generators Interactively set generators for column data. + configure-missing Interactively set the missingness of the... + configure-tables Interactively set tables to ignored, vocabulary... + create-data Populate the schema in the target directory with... + create-tables Create schema from the ORM YAML file. + create-vocab Import vocabulary data into the target database. + list-tables List the names of tables + create-generators Make a SQLSynthGen file of generator classes. + make-stats Compute summary statistics from the source database. + make-tables Make a YAML file representing the tables in the... + make-vocab Make files of vocabulary tables. + remove-data Truncate non-vocabulary tables in the destination... + remove-tables Drop all tables in the destination schema. + remove-vocab Truncate vocabulary tables in the destination... + validate-config Validate the format of a config file. + version Display version information. + +For the simplest case, we will need ``make-tables``, ``create-generators``, ``create-tables`` and ``create-data`` but, first, we need to set environment variables to tell sqlsynthgen how to access our source database (where the real data resides now) and destination database (where the synthetic data will go). We can do that in the terminal with the ``export`` keyword, as shown below, or in a file called ``.env``. The source and destination may be on the same database server, as long as the database or schema names differ. @@ -52,7 +58,7 @@ The next step is to make a sqlsynthgen file that defines one data generator per .. code-block:: console - $ sqlsynthgen make-generators + $ sqlsynthgen create-generators This will have created a file called ``ssg.py`` in the current directory. 
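The quickstart above now lists ``create-generators``; the ``main.py`` hunk that follows simply renames the underlying Typer command function, because Typer derives a command's CLI name from the function name with underscores replaced by hyphens. The snippet below is a generic Typer illustration of that behaviour, not sqlsynthgen code.

.. code-block:: python

    # Generic Typer illustration (not sqlsynthgen code): renaming the function
    # from make_generators to create_generators renames the CLI command,
    # since Typer turns underscores into hyphens for command names.
    import typer

    app = typer.Typer()


    @app.command()
    def create_generators(
        force: bool = typer.Option(False, "--force", "-f", help="Overwrite ssg.py."),
    ) -> None:
        """Make a SQLSynthGen file of generator classes."""
        typer.echo(f"create-generators called (force={force})")


    if __name__ == "__main__":
        app()  # invoked as: python cli.py create-generators --force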
diff --git a/sqlsynthgen/main.py b/sqlsynthgen/main.py index e0614777..773d23c9 100644 --- a/sqlsynthgen/main.py +++ b/sqlsynthgen/main.py @@ -214,7 +214,7 @@ def make_vocab( @app.command() -def make_generators( +def create_generators( orm_file: str = Option(ORM_FILENAME, help="The name of the ORM yaml file"), ssg_file: str = Option(SSG_FILENAME, help="Path to write Python generators to."), config_file: Optional[str] = Option(CONFIG_FILENAME, help="The configuration file"), @@ -227,7 +227,7 @@ def make_generators( returns a set of synthetic data generators for each attribute Example: - $ sqlsynthgen make-generators + $ sqlsynthgen create-generators """ logger.debug("Making %s.", ssg_file) diff --git a/tests/test_functional.py b/tests/test_functional.py index a9079f58..622dcc04 100644 --- a/tests/test_functional.py +++ b/tests/test_functional.py @@ -95,7 +95,7 @@ def test_workflow_minimal_args(self) -> None: self.assertEqual("", completed_process.stdout.decode("utf-8")) completed_process = run( - ["sqlsynthgen", "make-generators", "--force"], + ["sqlsynthgen", "create-generators", "--force"], capture_output=True, env=self.env, ) @@ -256,7 +256,7 @@ def test_workflow_maximal_args(self) -> None: completed_process = run( [ "sqlsynthgen", - "make-generators", + "create-generators", f"--orm-file={self.alt_orm_file_path}", f"--ssg-file={self.alt_ssg_file_path}", f"--config-file={self.config_file_path}", @@ -510,7 +510,7 @@ def test_unique_constraint_fail(self) -> None: run( [ "sqlsynthgen", - "make-generators", + "create-generators", f"--orm-file={self.alt_orm_file_path}", f"--ssg-file={self.alt_ssg_file_path}", f"--config-file={self.config_file_path}", diff --git a/tests/test_main.py b/tests/test_main.py index 5c860344..eb620f33 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -35,14 +35,14 @@ def test_create_vocab(self, mock_create: MagicMock, mock_import: MagicMock) -> N @patch("sqlsynthgen.main.import_file") @patch("sqlsynthgen.main.Path") @patch("sqlsynthgen.main.make_table_generators") - def test_make_generators( + def test_create_generators( self, mock_make: MagicMock, mock_path: MagicMock, mock_import: MagicMock, mock_settings: MagicMock, ) -> None: - """Test the make-generators sub-command.""" + """Test the create-generators sub-command.""" mock_path.return_value.exists.return_value = False mock_make.return_value = "some text" mock_settings.return_value.src_postges_dsn = "" @@ -50,7 +50,7 @@ def test_make_generators( result = runner.invoke( app, [ - "make-generators", + "create-generators", ], catch_exceptions=False, ) @@ -65,10 +65,10 @@ def test_make_generators( @patch("sqlsynthgen.main.Path") @patch("sqlsynthgen.main.logger") - def test_make_generators_errors_if_file_exists( + def test_create_generators_errors_if_file_exists( self, mock_logger: MagicMock, mock_path: MagicMock ) -> None: - """Test the make-generators sub-command doesn't overwrite.""" + """Test the create-generators sub-command doesn't overwrite.""" mock_path.return_value.exists.return_value = True mock_path.return_value.__str__.return_value = "ssg.py" @@ -76,7 +76,7 @@ def test_make_generators_errors_if_file_exists( result = runner.invoke( app, [ - "make-generators", + "create-generators", ], catch_exceptions=False, ) @@ -86,14 +86,14 @@ def test_make_generators_errors_if_file_exists( self.assertEqual(1, result.exit_code) @patch("sqlsynthgen.main.logger") - def test_make_generators_errors_if_src_dsn_missing( + def test_create_generators_errors_if_src_dsn_missing( self, mock_logger: MagicMock ) -> None: - 
"""Test the make-generators sub-command with missing db params.""" + """Test the create-generators sub-command with missing db params.""" result = runner.invoke( app, [ - "make-generators", + "create-generators", ], catch_exceptions=False, ) @@ -106,14 +106,14 @@ def test_make_generators_errors_if_src_dsn_missing( @patch("sqlsynthgen.main.Path") @patch("sqlsynthgen.main.import_file") @patch("sqlsynthgen.main.make_table_generators") - def test_make_generators_with_force_enabled( + def test_create_generators_with_force_enabled( self, mock_make: MagicMock, mock_import: MagicMock, mock_path: MagicMock, mock_settings: MagicMock, ) -> None: - """Tests the make-generators sub-commands overwrite files when instructed.""" + """Tests the create-generators sub-commands overwrite files when instructed.""" mock_path.return_value.exists.return_value = True mock_make.return_value = "make result" @@ -121,7 +121,7 @@ def test_make_generators_with_force_enabled( for force_option in ["--force", "-f"]: with self.subTest(f"Using option {force_option}"): - result: Result = runner.invoke(app, ["make-generators", force_option]) + result: Result = runner.invoke(app, ["create-generators", force_option]) mock_make.assert_called_once_with( mock_import.return_value, {}, None, overwrite_files=True diff --git a/tests/test_make.py b/tests/test_make.py index ec2feab0..575cffe4 100644 --- a/tests/test_make.py +++ b/tests/test_make.py @@ -70,7 +70,7 @@ def test_make_table_generators( @patch("sqlsynthgen.make.Path") @patch("sqlsynthgen.make.get_settings") @patch("sqlsynthgen.utils.create_engine") - def test_make_generators_do_not_overwrite( + def test_create_generators_do_not_overwrite( self, mock_create: MagicMock, mock_get_settings: MagicMock, @@ -99,7 +99,7 @@ def test_make_generators_do_not_overwrite( @patch("sqlsynthgen.utils.create_engine") @patch("sqlsynthgen.make.get_settings") @patch("sqlsynthgen.make.Path") - def test_make_generators_force_overwrite( + def test_create_generators_force_overwrite( self, mock_path: MagicMock, mock_get_settings: MagicMock, From 8f46dea23ba0d5bc2fe49f03430f06fb01213497 Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 20 May 2025 13:25:03 +0100 Subject: [PATCH 84/85] Various fixes --- sqlsynthgen/base.py | 28 ++++++++++++++-------------- sqlsynthgen/generators.py | 22 +++++++++++----------- sqlsynthgen/interactive.py | 6 +++--- sqlsynthgen/utils.py | 2 +- tests/test_interactive.py | 28 ++++++++++++++-------------- 5 files changed, 43 insertions(+), 43 deletions(-) diff --git a/sqlsynthgen/base.py b/sqlsynthgen/base.py index 969d5525..d400380c 100644 --- a/sqlsynthgen/base.py +++ b/sqlsynthgen/base.py @@ -4,9 +4,9 @@ from dataclasses import dataclass import functools import math -import numpy import os from pathlib import Path +import random from typing import Any import yaml @@ -33,25 +33,27 @@ def zipf_weights(size): class DistributionGenerator: root3 = math.sqrt(3) - def __init__(self): - self.rng = numpy.random.default_rng() - def uniform(self, low: float, high: float) -> float: - return self.rng.uniform(low=low, high=high) + def uniform(self, low, high) -> float: + return random.uniform(float(low), float(high)) def uniform_ms(self, mean, sd) -> float: m = float(mean) h = self.root3 * float(sd) - return self.rng.uniform(low=m - h, high=m + h) + return random.uniform(m - h, m + h) - def normal(self, mean: float, sd: float) -> float: - return self.rng.normal(loc=mean, scale=sd) + def normal(self, mean, sd) -> float: + return random.normalvariate(float(mean), float(sd)) def 
choice(self, a): - return self.rng.choice(a) + c = random.choice(a) + return c["value"] if type(c) is dict and "value" in c else c - def zipf_choice(self, a, n): - return self.rng.choice(a, p = zipf_weights(n)) + def zipf_choice(self, a, n=None): + if n is None: + n = len(a) + c = random.choices(a, weights=zipf_weights(n))[0] + return c["value"] if type(c) is dict and "value" in c else c def constant(self, value): return value @@ -125,13 +127,11 @@ def load(self, connection: Connection) -> None: ) class ColumnPresence: - def __init__(self): - self.rng = numpy.random.default_rng() def sampled(self, patterns): total = 0 for pattern in patterns: total += pattern.get("row_count", 0) - s = self.rng.integers(total) + s = random.randrange(total) for pattern in patterns: s -= pattern.get("row_count", 0) if s < 0: diff --git a/sqlsynthgen/generators.py b/sqlsynthgen/generators.py index a4cc048e..f689949f 100644 --- a/sqlsynthgen/generators.py +++ b/sqlsynthgen/generators.py @@ -50,9 +50,9 @@ def nominal_kwargs(self) -> dict[str, str]: The kwargs the generator wants to be called with. The values will tend to be references to something in the src-stats.yaml file. - For example {"avg_age": 'SRC_STATS["auto__patient"]["age_mean"]'} will + For example {"avg_age": 'SRC_STATS["auto__patient"][0]["age_mean"]'} will provide the value stored in src-stats.yaml as - SRC_STATS["auto__patient"]["age_mean"] as the "avg_age" argument + SRC_STATS["auto__patient"][0]["age_mean"] as the "avg_age" argument to the generator function. """ @@ -64,8 +64,8 @@ def select_aggregate_clauses(self) -> dict[str, str]: For example {"count": "COUNT(*)", "avg_thiscolumn": "AVG(thiscolumn)"} will make the clause become: "SELECT COUNT(*) AS count, AVG(thiscolumn) AS avg_thiscolumn FROM thistable" - and this will populate SRC_STATS["auto__thistable"]["count"] and - SRC_STATS["auto__thistable"]["avg_thiscolumn"] in the src-stats.yaml file. + and this will populate SRC_STATS["auto__thistable"][0]["count"] and + SRC_STATS["auto__thistable"][0]["avg_thiscolumn"] in the src-stats.yaml file. """ return {} @@ -77,7 +77,7 @@ def custom_queries(self) -> dict[str, str]: because these should use select_aggregate_clauses. For example {"myquery", "SELECT one, too AS two FROM mytable WHERE too > 1"} - will populate SRC_STATS["myquery"]["one"] and SRC_STATS["myquery"]["two"] + will populate SRC_STATS["myquery"][0]["one"] and SRC_STATS["myquery"][0]["two"] in the src-stats.yaml file. 
Keys should be chosen to minimize the chances of clashing with other queries, @@ -365,7 +365,7 @@ def __init__(self, column: Column, function_name: str, min_year: str, max_year: @classmethod def make_singleton(_cls, column: Column, engine: Engine, function_name: str): - extract_year = f"EXTRACT(YEAR FROM {column.name})" + extract_year = f"CAST(EXTRACT(YEAR FROM {column.name}) AS INT)" max_year = f"MAX({extract_year})" min_year = f"MIN({extract_year})" with engine.connect() as connection: @@ -384,8 +384,8 @@ def make_singleton(_cls, column: Column, engine: Engine, function_name: str): )] def nominal_kwargs(self): return { - "start": f'SRC_STATS["auto__{self._column.table.name}"]["{self._column.name}__start"]', - "end": f'SRC_STATS["auto__{self._column.table.name}"]["{self._column.name}__end"]', + "start": f'SRC_STATS["auto__{self._column.table.name}"][0]["{self._column.name}__start"]', + "end": f'SRC_STATS["auto__{self._column.table.name}"][0]["{self._column.name}__end"]', } def actual_kwargs(self): return { @@ -529,8 +529,8 @@ def __init__(self, table_name: str, column_name: str, buckets: Buckets): self.buckets = buckets def nominal_kwargs(self): return { - "mean": f'SRC_STATS["auto__{self.table_name}"]["mean__{self.column_name}"]', - "sd": f'SRC_STATS["auto__{self.table_name}"]["stddev__{self.column_name}"]', + "mean": f'SRC_STATS["auto__{self.table_name}"][0]["mean__{self.column_name}"]', + "sd": f'SRC_STATS["auto__{self.table_name}"][0]["stddev__{self.column_name}"]', } def actual_kwargs(self): if self.buckets is None: @@ -617,7 +617,7 @@ def __init__(self, table_name, column_name, values, counts): self._fit = fit_from_buckets(counts, estimated_counts) def nominal_kwargs(self): return { - "a": f'SRC_STATS["auto__{self.table_name}__{self.column_name}"]["value"]', + "a": f'SRC_STATS["auto__{self.table_name}__{self.column_name}"]', } def actual_kwargs(self): return { diff --git a/sqlsynthgen/interactive.py b/sqlsynthgen/interactive.py index 6b932b9a..d4c8a4d7 100644 --- a/sqlsynthgen/interactive.py +++ b/sqlsynthgen/interactive.py @@ -1168,7 +1168,7 @@ def _print_custom_queries(self, gen: Generator) -> None: return kwa = gen.actual_kwargs() cq_key2args = {} - src_stat_re = re.compile(f'SRC_STATS\\["([^"]+)"\\]\\["([^"]+)"\\]') + src_stat_re = re.compile(f'SRC_STATS\\["([^"]+)"\\]') for argname, src_stat in gen.nominal_kwargs().items(): if argname in kwa: src_stat_groups = src_stat_re.match(src_stat) @@ -1201,7 +1201,7 @@ def _print_select_aggregate_query(self, table_name, gen: Generator) -> None: vals = [] src_stat2kwarg = { v: k for k, v in gen.nominal_kwargs().items() } for n in sacs.keys(): - src_stat = f'SRC_STATS["auto__{table_name}"]["{n}"]' + src_stat = f'SRC_STATS["auto__{table_name}"][0]["{n}"]' if src_stat in src_stat2kwarg: ak = src_stat2kwarg[src_stat] if ak in kwa: @@ -1209,7 +1209,7 @@ def _print_select_aggregate_query(self, table_name, gen: Generator) -> None: else: logger.warning("actual_kwargs for %s does not report %s", gen.function_name(), ak) else: - logger.warning('nominal_kwargs for %s does not have a value SRC_STATS["auto__%s"]["%s"]', gen.function_name(), table_name, n) + logger.warning('nominal_kwargs for %s does not have a value SRC_STATS["auto__%s"][0]["%s"]', gen.function_name(), table_name, n) select_q = self._get_aggregate_query([gen], table_name) self.print("{0}; providing the following values: {1}", select_q, vals) diff --git a/sqlsynthgen/utils.py b/sqlsynthgen/utils.py index be45ffb5..fba9900d 100644 --- a/sqlsynthgen/utils.py +++ b/sqlsynthgen/utils.py @@ 
-110,7 +110,7 @@ def download_table( ) -> None: """Download a Table and store it as a .yaml file.""" open_fn = open_compressed_file if compress else open_file - with engine.connect() as conn: + with engine.connect().execution_options(yield_per=1000) as conn: with open_fn(yaml_file_name) as yamlfile: stmt = select(table) rowcount = table_row_count(table, conn) diff --git a/tests/test_interactive.py b/tests/test_interactive.py index b00a684c..6da12e38 100644 --- a/tests/test_interactive.py +++ b/tests/test_interactive.py @@ -375,8 +375,8 @@ def test_set_generator_distribution(self): self.assertEqual(row_gen["name"], GENERATOR) self.assertListEqual(row_gen["columns_assigned"], [COLUMN]) self.assertDictEqual(row_gen["kwargs"], { - "mean": f'SRC_STATS["auto__{TABLE}"]["mean__{COLUMN}"]', - "sd": f'SRC_STATS["auto__{TABLE}"]["stddev__{COLUMN}"]', + "mean": f'SRC_STATS["auto__{TABLE}"][0]["mean__{COLUMN}"]', + "sd": f'SRC_STATS["auto__{TABLE}"][0]["stddev__{COLUMN}"]', }) self.assertEqual(len(gc.config["src-stats"]), 1) self.assertDictEqual(gc.config["src-stats"][0], { @@ -403,7 +403,7 @@ def test_set_generator_choice(self): self.assertEqual(row_gen["name"], GENERATOR) self.assertListEqual(row_gen["columns_assigned"], [COLUMN]) self.assertDictEqual(row_gen["kwargs"], { - "a": f'SRC_STATS["auto__{TABLE}__{COLUMN}"]["value"]', + "a": f'SRC_STATS["auto__{TABLE}__{COLUMN}"]', }) self.assertEqual(len(gc.config["src-stats"]), 1) self.assertDictEqual(gc.config["src-stats"][0], { @@ -422,8 +422,8 @@ def test_old_generators_remain(self): "name": "dist_gen.normal", "columns_assigned": ["frequency"], "kwargs": { - "mean": 'SRC_STATS["auto__string"]["mean__frequency"]', - "sd": 'SRC_STATS["auto__string"]["stddev__frequency"]', + "mean": 'SRC_STATS["auto__string"][0]["mean__frequency"]', + "sd": 'SRC_STATS["auto__string"][0]["stddev__frequency"]', }, }] } @@ -453,8 +453,8 @@ def test_old_generators_remain(self): self.assertEqual(row_gen["name"], "dist_gen.normal") self.assertListEqual(row_gen["columns_assigned"], ["frequency"]) self.assertDictEqual(row_gen["kwargs"], { - "mean": 'SRC_STATS["auto__string"]["mean__frequency"]', - "sd": 'SRC_STATS["auto__string"]["stddev__frequency"]', + "mean": 'SRC_STATS["auto__string"][0]["mean__frequency"]', + "sd": 'SRC_STATS["auto__string"][0]["stddev__frequency"]', }) self.assertEqual(len(gc.config["src-stats"]), 1) self.assertDictEqual(gc.config["src-stats"][0], { @@ -476,8 +476,8 @@ def test_aggregate_queries_merge(self): "name": "dist_gen.normal", "columns_assigned": ["frequency"], "kwargs": { - "mean": 'SRC_STATS["auto__string"]["mean__frequency"]', - "sd": 'SRC_STATS["auto__string"]["stddev__frequency"]', + "mean": 'SRC_STATS["auto__string"][0]["mean__frequency"]', + "sd": 'SRC_STATS["auto__string"][0]["stddev__frequency"]', }, }] } @@ -507,13 +507,13 @@ def test_aggregate_queries_merge(self): self.assertEqual(row_gen1["name"], "dist_gen.normal") self.assertListEqual(row_gen0["columns_assigned"], [COLUMN]) self.assertDictEqual(row_gen0["kwargs"], { - "mean": f'SRC_STATS["auto__string"]["mean__{COLUMN}"]', - "sd": f'SRC_STATS["auto__string"]["stddev__{COLUMN}"]', + "mean": f'SRC_STATS["auto__string"][0]["mean__{COLUMN}"]', + "sd": f'SRC_STATS["auto__string"][0]["stddev__{COLUMN}"]', }) self.assertListEqual(row_gen1["columns_assigned"], ["frequency"]) self.assertDictEqual(row_gen1["kwargs"], { - "mean": 'SRC_STATS["auto__string"]["mean__frequency"]', - "sd": 'SRC_STATS["auto__string"]["stddev__frequency"]', + "mean": 'SRC_STATS["auto__string"][0]["mean__frequency"]', + 
"sd": 'SRC_STATS["auto__string"][0]["stddev__frequency"]', }) self.assertEqual(len(gc.config["src-stats"]), 1) self.assertEqual(gc.config["src-stats"][0]["name"], "auto__string") @@ -683,7 +683,7 @@ def test_create_with_missingness(self): loop.close() with os.fdopen(stats_fd, "w", encoding="utf-8") as stats_fh: stats_fh.write(yaml.dump(src_stats)) - # `make-generators` with `src-stats.yaml` and the rest, producing `ssg.py` + # `create-generators` with `src-stats.yaml` and the rest, producing `ssg.py` ssg_content = make_table_generators( metadata, config, From 08b4896ed0f8b5c34834ebc017e57a81daedbcbd Mon Sep 17 00:00:00 2001 From: Tim Band Date: Tue, 20 May 2025 16:16:20 +0100 Subject: [PATCH 85/85] Updated quick start guide --- docs/source/installation.rst | 3 + docs/source/quickstart.rst | 323 ++++++++++++++++++++++++++++++++--- 2 files changed, 304 insertions(+), 22 deletions(-) diff --git a/docs/source/installation.rst b/docs/source/installation.rst index 7e7e56c8..bf63de77 100644 --- a/docs/source/installation.rst +++ b/docs/source/installation.rst @@ -14,3 +14,6 @@ Check that you can view the help message with: .. code-block:: console $ sqlsynthgen --help + +It can also be used directly within a Docker container by downloading image ``timband/ssg``. +See the :ref:`quickstart guide ` for more information. diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst index a35f65f3..0fb40455 100644 --- a/docs/source/quickstart.rst +++ b/docs/source/quickstart.rst @@ -3,6 +3,9 @@ Quick Start =========== +Overview +-------- + After :ref:`Installation `, we can run ``sqlsynthgen`` to see the available commands: .. code-block:: console @@ -18,10 +21,10 @@ After :ref:`Installation `, we can run ``sqlsynthgen`` to see configure-missing Interactively set the missingness of the... configure-tables Interactively set tables to ignored, vocabulary... create-data Populate the schema in the target directory with... + create-generators Make a SQLSynthGen file of generator classes. create-tables Create schema from the ORM YAML file. create-vocab Import vocabulary data into the target database. list-tables List the names of tables - create-generators Make a SQLSynthGen file of generator classes. make-stats Compute summary statistics from the source database. make-tables Make a YAML file representing the tables in the... make-vocab Make files of vocabulary tables. @@ -31,54 +34,330 @@ After :ref:`Installation `, we can run ``sqlsynthgen`` to see validate-config Validate the format of a config file. version Display version information. -For the simplest case, we will need ``make-tables``, ``create-generators``, ``create-tables`` and ``create-data`` but, first, -we need to set environment variables to tell sqlsynthgen how to access our source database (where the real data resides now) and destination database (where the synthetic data will go). -We can do that in the terminal with the ``export`` keyword, as shown below, or in a file called ``.env``. -The source and destination may be on the same database server, as long as the database or schema names differ. -If the source and destination schemas are the default schema for the user on that database, you should not set those variables. -If you are using a DBMS that does not support schemas (e.g. MariaDB), you must not set those variables. +sqlsynthgen is designed to be run connected to either the private source database or the more public destination database. It never needs to be connected to both. 
+So you can install sqlsynthgen on a machine with access to the private source database that will do the reading, and again on another machine that will do the creation.
+
+In this guide we will walk through configuring column generators. We will not discuss stories here.
+
+Connecting to the source database
+---------------------------------
+
+To connect to the source database, set the ``SRC_DSN`` and ``SRC_SCHEMA`` environment variables.
+You can leave ``SRC_SCHEMA`` unset if you are using the default schema (or a database that does not use schema names, such as MariaDB):
+
+MacOS or Linux:
 
 .. code-block:: console
 
     $ export SRC_DSN="postgresql://someuser:somepassword@myserver.mydomain.com"
     $ export SRC_SCHEMA='myschema'
 
-    $ export DST_DSN="postgresql://someuser:somepassword@myserver.mydomain.com/dst_db"
-    $ export DST_SCHEMA='myschema'
+Windows Command Shell:
+
+.. code-block:: console
+
+    $ set SRC_DSN=postgresql://someuser:somepassword@myserver.mydomain.com
+    $ set SRC_SCHEMA=myschema
 
-Next, we make a SQLAlchemy file that defines the structure of your database using the ``make-tables`` command:
+Running from the ready-built Docker container, make an output directory then use that as the data volume like so (please use WSL for this on Windows):
+
+.. code-block:: console
+
+    $ mkdir output
+    $ docker run --rm --user $(id -u):$(id -g) --network host -e SRC_SCHEMA=myschema -e SRC_DSN=postgresql://someuser:somepassword@myserver.mydomain.com -itv ./output:data --pull always timband/ssg
+
+Now you can use the commands that use the source database (the ones beginning ``configure-`` and ``make-`` but not the ones beginning ``create-`` and ``remove-``).
+
+Initial configuration
+---------------------
+
+The first job is to read the structure of the source database:
 
 .. code-block:: console
 
     $ sqlsynthgen make-tables
 
-This will have created a file called ``orm.py`` in the current directory, with a SQLAlchemy class for each of your tables.
+This will create a file called ``orm.yaml``. You should not need to edit this file.
 
-The next step is to make a sqlsynthgen file that defines one data generator per table in the source database:
+Configuring table types
+-----------------------
+
+Next you can use the ``configure-tables`` command to categorize each of your source tables into one of five types:
+
+* ``private`` for tables that are Primary Private, that is the tables containing the subjects of privacy (the table of hospital patients, for example). Not every table containing sensitive data needs to be marked private, only the table directly referring to the individuals (or families) that need to be protected.
+* ``ignore`` for tables that should not be present in the destination database
+* ``empty`` for tables that should contain no data, but be present (also for tables that should be populated entirely from stories, see later)
+* ``vocabulary`` for tables that should be reproduced exactly in the destination database
+* ``normal`` for everything else
+
+This command will start an interactive command shell. Don't be intimidated, just type ``?`` (and press return) to get help:
 
 .. code-block:: console
 
-    $ sqlsynthgen create-generators
+    $ sqlsynthgen configure-tables
+    Interactive table configuration (ignore, vocabulary, private, normal or empty). Type ? for help.
 
-This will have created a file called ``ssg.py`` in the current directory.
+
+    (table: myfirsttable) ?
-We can use the ``create-table`` command to read the ``orm.py`` file, create our destination schema (if it doesn't already exist), and to create empty copies of all the tables that are in the source database. + Use the commands 'ignore', 'vocabulary', + 'private', 'empty' or 'normal' to set the table's type. Use 'next' or + 'previous' to change table. Use 'tables' and 'columns' for + information about the database. Use 'data', 'peek', 'select' or + 'count' to see some data contained in the current table. Use 'quit' + to exit this program. + Documented commands (type help ): + ======================================== + columns data help next peek private select vocabulary + counts empty ignore normal previous quit tables + + (table: myfirsttable) + +You can also get help for any of the commands listed; for example to see help for the ``vocabulary`` command type ``? vocabulary`` or ``help vocabulary``: .. code-block:: console - $ sqlsynthgen create-tables + (table: myfirsttable) help vocabulary + Set the current table as a vocabulary table, and go to the next table + (table: myfirsttable) + +Note that the prompt here is ``(table: myfirsttable)``. This will be different on your database; it will show the name of the table that is currently under consideration. + +Tab completion +^^^^^^^^^^^^^^ + +You can use the Tab key on your keyboard to shorten these commands. Try typing h-tab-space-v-tab-return, and you will get ``help vocabulary`` again. +Some commands require a little more. Try typing h-tab-p-tab and you will see that the ``p`` does not get expanded to ``private`` because there is more than one possibility (it could be ``peek`` or ``previous``). +Press the Tab key again to see these options: + +.. code-block:: console + + (table: actor) help p + peek previous private + (table: actor) help p + +Now you can continue with r-i-tab to get ``private``, r-e-tab to get ``previous`` or e-tab to get ``peek``. This can be very useful; try pressing Tab twice on an empty line to see quickly all the possible commands, for example! + +Navigating the database +^^^^^^^^^^^^^^^^^^^^^^^ + +Use ``next`` and ``previous`` to go forwards and backwards through the list of tables. +You can use ``next tablename`` to go to the table ``tablename`` (tab completion works here too!) +You can use ``tables`` to list all the tables and any configuration you have already done. + +Setting the type of the table +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Use ``private``, ``ignore``, ``empty``, ``vocabulary`` or ``normal`` to set the type of the table. Any you don't set will be ``normal``. +If you have previously run ``configure-tables`` (or edited the ``config.yaml`` file yourself!) the previously set types will be preserved unless you change them. + +Examining the data +^^^^^^^^^^^^^^^^^^ + +But how do you know which type to choose? You can sample the data in the table to help here: + +* ``data`` is the easiest: it shows a sample of ten complete rows from the database. +* ``data 20`` if you want more (or fewer) than ten lines, add how many lines you want. +* ``data 20 columnname`` if you want to see just one column, use this formulation with the name of the column you want to examine. +* ``data 20 columnname 30`` adding one extra number here restricts the sampling to only entries at least as long as this number of characters. You can use this to find odd descriptions that people have put into strange places in the database. 
+* ``columns`` shows structural data about the table +* ``counts`` tells you how many NULLs are in each column (not so useful here, perhaps) +* ``peek column1 column2`` is like ``data`` but restricted to the columns you specified (and it will not show fully NULL rows, so use this to see data in sparse columns) +* and if none of that satisfies you, type any SQL query beginning with ``select`` to get the first 50 results from an arbitrary query. + +Repeat last command +^^^^^^^^^^^^^^^^^^^ + +Entering an empty command will repeat the last command. +So if you want more data than ``data`` gives you, you can type d-a-t-a-return-return-return, +or if you want to step through tables without altering them, you can type n-e-x-t-return-return-return-... + +When you are finished +^^^^^^^^^^^^^^^^^^^^^ + +Use the command ``quit``. It will then ask you if you want to save the results. +You must type ``yes`` to save, ``no`` to exit without saving or ``cancel`` to return to the ``configure-tables`` command prompt. +You must type one of these three options in full but tab completion is available, so y-tab-return, n-tab-return or c-tab-return will do! + +Configuring column generators +----------------------------- + +The ``configure-generators`` command is similar to ``configure-tables``, but here you are configuring each column in ``normal`` and ``private`` tables. + +The ``next``, ``previous``, ``peek``, ``columns``, ``select``, ``tables``, ``counts``, ``help`` and ``quit`` work as before, but ``next`` allows you to visit not just a different table but also any column with the ``next table.column`` syntax. + +``info`` gives you simple information on the current column. Use this while you are getting used to configuring generators. + +Configuring a column generator has three steps: -Now that we have created the schema that will hold synthetic data, we can use the ``create-data`` command to read ``orm.py`` & ``ssg.py`` and generate data: +1. ``propose`` shows you a list of built-in generators that would be appropriate for this column +2. ``compare`` allows you to see the output from these generators together with the data each generator requires from the database. +3. ``set`` allows you to set the generator from the proposal list (or ``unset`` removes any previously set generator) + +Propose +^^^^^^^ + +``propose`` will provide a list of suitable generators, attempting to list them by relevance (might not do a fantastic job): + +.. code-block:: console + + (film.length) propose + Sample of actual source data: 173,73,172,81,86... + 1. dist_gen.uniform_ms: (fit: 1.19e-05) 107.55835091131807, 108.68424131615669, 76.18479907993151, 124.02617636581346, 142.3863993456911 ... + 2. dist_gen.normal: (fit: 0.000109) 94.49927930013584, 69.6024952777228, 101.74949693935817, 22.45166839395958, 76.40908811297868 ... + 3. dist_gen.choice: (fit: 0.0346) 155, 86, 89, 178, 166 ... + 4. dist_gen.zipf_choice: (fit: 2) 75, 53, 179, 179, 135 ... + 5. generic.person.weight: (no fit) 85, 73, 69, 58, 81 ... + 6. dist_gen.constant: (no fit) None, None, None, None, None ... + (film.length) + +Here we can see the first line is a small sample of data from the real column in the source database. +The other lines have four elements: + +* ``3.`` is the number of the generator, we will need that later! 
+* ``dist_gen.choice`` is the name of the generator +* ``(fit: 0.0346)`` is a measure of how good sqlsynthgen thinks the generator is (not necessarily a very good measure) +* ``155, 86, 89, 178, 166 ...`` is a sample of data from this generator + +For more information, we need the next command, ``compare``. + +Compare +^^^^^^^ + +In the previous example, we might consider that ``1``, ``2``, ``3`` and ``4`` are worth investigating further, so we try: .. code-block:: console - $ sqlsynthgen create-data + (film.length) compare 1 2 3 4 + Not private + 1. dist_gen.uniform_ms requires the following data from the source database: + SELECT AVG(length) AS mean__length, STDDEV(length) AS stddev__length FROM film; providing the following values: [Decimal('115.2720000000000000'), Decimal('40.4263318185598470')] + 2. dist_gen.normal requires the following data from the source database: + SELECT AVG(length) AS mean__length, STDDEV(length) AS stddev__length FROM film; providing the following values: [Decimal('115.2720000000000000'), Decimal('40.4263318185598470')] + 3. dist_gen.choice requires the following data from the source database: + SELECT length AS value FROM film GROUP BY value ORDER BY COUNT(length) DESC; providing the following values: [[85, 179, 112, 84, 74, 100, 73, 102, 48, 122, 92, 139, 114, 61, 107, 75, 181, 176, 178, 80, 185, 135, 63, 50, 137, 136, 59, 53, 152, 110, 103, 161, 126, 64, 153, 147, 120, 172, 121, 144, 150, 67, 60, 184, 93, 132, 98, 99, 118, 171, 113, 58, 71, 51, 70, 52, 101, 180, 115, 65, 173, 82, 125, 57, 151, 163, 167, 109, 111, 123, 128, 142, 141, 154, 47, 76, 145, 148, 129, 143, 157, 79, 182, 54, 83, 91, 130, 69, 87, 169, 78, 159, 158, 155, 119, 160, 106, 62, 177, 104, 174, 105, 89, 149, 175, 138, 77, 134, 133, 162, 146, 117, 166, 68, 46, 127, 183, 108, 140, 49, 56, 165, 131, 90, 86, 97, 164, 170, 94, 116, 72, 156, 124, 88, 168, 81, 95, 96, 55, 66]] + 4. dist_gen.zipf_choice requires the following data from the source database: + SELECT length AS value FROM film GROUP BY value ORDER BY COUNT(length) DESC; providing the following values: [[85, 179, 112, 84, 74, 100, 73, 102, 48, 122, 92, 139, 114, 61, 107, 75, 181, 176, 178, 80, 185, 135, 63, 50, 137, 136, 59, 53, 152, 110, 103, 161, 126, 64, 153, 147, 120, 172, 121, 144, 150, 67, 60, 184, 93, 132, 98, 99, 118, 171, 113, 58, 71, 51, 70, 52, 101, 180, 115, 65, 173, 82, 125, 57, 151, 163, 167, 109, 111, 123, 128, 142, 141, 154, 47, 76, 145, 148, 129, 143, 157, 79, 182, 54, 83, 91, 130, 69, 87, 169, 78, 159, 158, 155, 119, 160, 106, 62, 177, 104, 174, 105, 89, 149, 175, 138, 77, 134, 133, 162, 146, 117, 166, 68, 46, 127, 183, 108, 140, 49, 56, 165, 131, 90, 86, 97, 164, 170, 94, 116, 72, 156, 124, 88, 168, 81, 95, 96, 55, 66]] + +--------+------------------------+--------------------+--------------------+-------------------------+ + | source | 1. dist_gen.uniform_ms | 2. dist_gen.normal | 3. dist_gen.choice | 4. 
dist_gen.zipf_choice | + +--------+------------------------+--------------------+--------------------+-------------------------+ + | 60 | 46.632794372002664 | 87.89991176975211 | 96 | 59 | + | 56 | 96.17573671882317 | 143.27403823693294 | 145 | 67 | + | 167 | 158.2777826396661 | 69.60827255211873 | 99 | 107 | + | 160 | 48.91052171988566 | 101.08450212269153 | 108 | 85 | + | 64 | 151.7534973807259 | 46.65796712446469 | 106 | 136 | + | 138 | 92.64980389758904 | 129.6901021567232 | 109 | 122 | + | 109 | 62.851359423566414 | 96.26116817758401 | 158 | 85 | + | 74 | 68.29348043746441 | 33.58822018478509 | 85 | 84 | + | 75 | 123.84806734660017 | 91.6033632909829 | 53 | 61 | + | 143 | 59.016661941662406 | 175.02921918869674 | 62 | 181 | + | 62 | 77.0672702141529 | 153.55365499492189 | 185 | 147 | + | 75 | 126.53040995684793 | 137.32698597697157 | 102 | 179 | + | 162 | 125.58699420416819 | 113.8898812686725 | 94 | 85 | + | 157 | 96.93359267654796 | 61.654471841517044 | 97 | 180 | + | 117 | 181.0134365019266 | 91.93492164429024 | 57 | 85 | + | 61 | 75.68573964087891 | 115.79796856358605 | 141 | 102 | + | 73 | 85.37110501852806 | 141.1104329209363 | 51 | 137 | + | 110 | 136.56146532743944 | 112.04603094742818 | 127 | 139 | + | 67 | 152.49478264537873 | 146.82247056721147 | 51 | 74 | + | 109 | 129.69326718355967 | 111.24264422243346 | 61 | 85 | + +--------+------------------------+--------------------+--------------------+-------------------------+ + (film.length) + +The first line is telling us whether the table is Primary Private (``private`` in ``configure-tables``), Secondary Private (refers to a Primary Private table) or Not Private. +The next lines tell us, for each generator we chose, the query it needs running on the database and what data that results in. +The table below that is a sample from the source database and each generator. + +Set and unset +^^^^^^^^^^^^^ -By default, ``create-data`` will have inserted one row per table and will have used the column data types to decide how to randomly generate data. -To create more data each time we call ``create-data``, we can provide the ``num-passes`` argument: +Say we decide on generator 2, we can set this with ``set 2``. +``unset`` removes any previously set generator. + +Configuring missingness +----------------------- + +The ``configure-missing`` command is also similar to ``configure-tables``, but here you are configuring the patterns of NULLs within tables. + +This configuration can only really cope with MCAR (Missing Completely at Random) data. +This means we cannot specify that certain patterns of NULLs are more or less likely depending on the generated values for certain fields. Something for future development. + +At the moment there are only two missingness generators. +Use command ``none`` to set that no NULLs will be generated (unless the generator itelf generates them). +Use the command ``sampled`` to set that the NULLs are generated according to a sample of rows from the database. +The ``sampled`` missingness generator samples 1000 rows from the table, and generates missingness patterns present in these rows in proportion to how common they are in this sample. +This gives a reasonable approximation to the missingness patterns in the original data. + +The other commands ``counts``, ``help``, ``next``, ``peek``, ``previous``, ``quit``, ``select`` and ``tables`` work the same as before. 
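
The behaviour of the ``sampled`` option described above can be pictured with a minimal Python sketch. It is purely illustrative: the function name, the example rows and the column names below are invented for this guide and are not part of sqlsynthgen's API.

.. code-block:: python

    import random

    def sample_missingness_patterns(rows, k):
        """Illustrative only: tally the NULL-patterns seen in a sample of rows,
        then draw new patterns in proportion to how often each one occurred."""
        counts = {}
        for row in rows:  # each row is a dict of column name -> value
            pattern = tuple(name for name, value in row.items() if value is None)
            counts[pattern] = counts.get(pattern, 0) + 1
        patterns = list(counts)
        weights = [counts[p] for p in patterns]
        # Each drawn pattern names the columns to blank out in one generated row,
        # independently of the generated values (i.e. MCAR).
        return random.choices(patterns, weights=weights, k=k)

    sampled_rows = [
        {"height_cm": 171, "weight_kg": None},
        {"height_cm": None, "weight_kg": None},
        {"height_cm": 180, "weight_kg": 77},
        {"height_cm": 165, "weight_kg": None},
    ]
    print(sample_missingness_patterns(sampled_rows, k=5))

In the real command the sample comes from up to 1000 rows of the actual table, so patterns that commonly occur together (for example, two measurements that are always missing at the same time) are reproduced at roughly their original rate.
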
+
+Generating the data
+-------------------
+
+Now you have the files ``orm.yaml`` (generated with ``make-tables``) and ``config.yaml`` (generated by the ``configure-`` commands).
+You also need two more. Run the following commands:
+
+.. code-block:: console
+
+    $ sqlsynthgen make-stats
+    $ sqlsynthgen make-vocab --compress --no-force
+
+The first of these generates a file ``src-stats.yaml`` containing summary statistics from the database that the generators need.
+The second generates a file ``tablename.yaml.gz`` for each vocabulary table, containing its data. WARNING: this can take many hours depending on how big they are!
+``--compress`` compresses the files with gzip, which might be necessary if the machine sqlsynthgen is running on risks running out of disk space.
+``--no-force`` is necessary if you have had to interrupt the process previously and want to keep your existing files; it will generate only files that do not already exist.
+If you had to stop ``make-vocab`` (or it got stopped for some other reason) you will need to check which of your ``.gz`` files are complete. You can use ``gzip -t filename.gz`` for this.
+
+Taking files out of the private network
+---------------------------------------
+
+You now have ``orm.yaml``, ``config.yaml``, ``src-stats.yaml`` and all the ``tablename.yaml.gz`` files.
+These can all be checked for compliance with any privacy checks you are using and then sent out of the private network.
+
+Connecting to the destination database
+--------------------------------------
+
+Just like connecting to the source database, we will use environment variables, either in Bash, Windows Command Shell or Docker:
+
+MacOS or Linux:
+
+.. code-block:: console
+
+    $ export DST_DSN="postgresql://someuser:somepassword@myserver.mydomain.com/dst_db"
+    $ export DST_SCHEMA='myschema'
+
+Windows Command Shell:
+
+.. code-block:: console
+
+    $ set DST_DSN=postgresql://someuser:somepassword@myserver.mydomain.com/dst_db
+    $ set DST_SCHEMA=myschema
+
+Running from the ready-built Docker container, from within a directory holding only your ``.yaml`` and ``.yaml.gz`` files (please use WSL for this on Windows):
+
+.. code-block:: console
+
+    $ docker run --rm --user $(id -u):$(id -g) --network host -e DST_SCHEMA=myschema -e DST_DSN=postgresql://someuser:somepassword@myserver.mydomain.com/dst_db -itv .:data --pull always timband/ssg
+
+Whichever we choose, we can now create the tables, load the vocabulary, build the generators file and generate the data:
+
+.. code-block:: console
+
+    $ sqlsynthgen create-tables
+    $ sqlsynthgen create-vocab
+    $ sqlsynthgen create-generators --stats-file src-stats.yaml
+    $ sqlsynthgen create-data --num-passes 10
+
+The first of these uses ``orm.yaml`` to create the destination database.
+The second uses all the ``.yaml.gz`` (or ``.yaml``) files representing the vocabulary tables (this can take hours, too).
+The third uses ``config.yaml`` to create a file ``ssg.py`` containing code to call the generators as configured.
+The last one actually generates the data. ``--num-passes`` controls how many rows are generated.
+At present the only way to generate different numbers of rows for different tables is to configure ``num_rows_per_pass`` in ``config.yaml``:
+
+.. code-block:: yaml
+
+    observation:
+        num_rows_per_pass: 50
+
+This makes every call to ``create-data`` produce 50 rows in the ``observation`` table (each time you change ``config.yaml`` you need to re-run ``create-generators``).
+If you call ``create-data`` multiple times you get more data added to whatever already exists. Call ``remove-data`` to remove all rows from all non-vocabulary tables. -We will have inserted 11 rows per table, with the last two commands. +You can call ``remove-vocab`` to remove all rows from all vocabulary tables, and you can call ``remove-tables`` to empty the database completely.
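
As a concrete illustration, one reasonable way to refresh the synthetic data after editing ``config.yaml`` (the exact sequence is up to you) uses only the commands described above:

.. code-block:: console

    $ sqlsynthgen remove-data
    $ sqlsynthgen create-generators --stats-file src-stats.yaml
    $ sqlsynthgen create-data --num-passes 10

Here ``remove-data`` clears the previously generated rows, ``create-generators`` rebuilds ``ssg.py`` from the edited configuration, and ``create-data`` repopulates the non-vocabulary tables.
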