diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..f67ee96
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,64 @@
+[flake8]
+exclude =
+    .git,
+    .pybuild,
+    .eggs,
+    __pycache__,
+    .venv
+
+max_line_length = 88
+
+ignore =
+    # Don't require docstrings on magic methods.
+    D105,
+
+    # Don't require docstrings for __init__ methods.
+    D107,
+
+    # Don't require imperative mood docstrings.
+    D401,
+
+    # Line break occurred before a binary operator
+    W503,
+
+    # Whitespace before ':'
+    E203,
+
+    # Don't error on TODOs
+    T000,
+
+    ################################################################################################
+    ################################################################################################
+    ################################################################################################
+    # TODO - Remove these soon!!!
+
+    # Missing docstring in public module
+    D100,
+
+    # Missing docstring in public class
+    D101,
+
+    # Missing docstring in public method
+    D102,
+
+    # Missing docstring in public function
+    D103,
+
+    # Missing docstring in public package
+    D104,
+
+    # Missing docstring in public nested class
+    D106,
+
+    # isort found an import in the wrong position
+    I001,
+
+    # isort expected 1 blank line in imports, found 0
+    I003,
+
+    # isort found an unexpected blank line in imports
+    I004,
+
+    ################################################################################################
+    ################################################################################################
+    ################################################################################################
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 0000000..812435f
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,56 @@
+name: CI
+
+on:
+  push:
+  pull_request:
+    branches:
+      - main
+
+jobs:
+  lint:
+    runs-on: ubuntu-latest
+
+    permissions:
+      id-token: write
+      contents: read
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.9", "3.10", "3.11"]
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+
+      - name: Cache Poetry virtualenv
+        uses: actions/cache@v3
+        id: cache
+        with:
+          path: ~/.virtualenvs
+          key: poetry-${{ hashFiles('**/poetry.lock') }}
+          restore-keys: |
+            poetry-
+
+      - name: Install Dependencies
+        run: poetry install
+        if: steps.cache.outputs.cache-hit != 'true'
+
+      - name: Run black
+        run: poetry run black --check .
+
+      - name: Run isort
+        run: poetry run isort --check --verbose icekube
+
+      - name: Run flake8
+        run: poetry run flake8 icekube
+
+      - name: Run mypy
+        run: poetry run mypy icekube
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1c09d7c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,161 @@
+data/
+
+# Created by https://www.toptal.com/developers/gitignore/api/vim,python
+# Edit at https://www.toptal.com/developers/gitignore?templates=vim,python
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+### Vim ###
+# Swap
+[._]*.s[a-v][a-z]
+!*.svg # comment out if you don't need vector files
+[._]*.sw[a-p]
+[._]s[a-rt-v][a-z]
+[._]ss[a-gi-z]
+[._]sw[a-p]
+
+# Session
+Session.vim
+Sessionx.vim
+
+# Temporary
+.netrwhist
+*~
+# Auto-generated tag files
+tags
+# Persistent undo
+[._]*.un~
+
+# End of https://www.toptal.com/developers/gitignore/api/vim,python
diff --git a/.isort.cfg b/.isort.cfg
new file mode 100644
index 0000000..8bd7bee
--- /dev/null
+++ b/.isort.cfg
@@ -0,0 +1,10 @@
+[isort]
+atomic = true
+balanced_wrapping = true
+# vertical hanging indent style wrapping
+multi_line_output = 3
+include_trailing_comma = true
+
+known_first_party = icekube
+default_section = THIRDPARTY
+sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
diff --git a/LICENSE.md b/LICENSE.md
new file mode 100644
index 0000000..7f8e3ce
--- /dev/null
+++ b/LICENSE.md
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] WithSecure Oyj. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000..92a56a6 --- /dev/null +++ b/README.md @@ -0,0 +1,66 @@ +# IceKube + +
+ +
+
+IceKube is a tool to help find attack paths within a Kubernetes cluster from a low-privileged starting point to a preferred location, typically `cluster-admin`.
+
+## Setup
+
+* `docker-compose up -d` - Spins up neo4j, accessible at `http://localhost:7474/`
+* `poetry install --no-dev` (creates venv) *OR* `pip install --user .` (installs the CLI globally)
+* Make sure your `kubectl` current context is set to the target cluster, and has `cluster-admin` privileges
+
+## Permissions Required
+
+IceKube requires elevated privileges within the target cluster to enumerate resources, typically read-only access to all resources within the cluster, including secrets. If that is a concern, note that IceKube does not persist any secret data it retrieves.
+
+Resource types can also be filtered out of enumeration; instructions can be found in the `Filtering Resources` section below.
+
+## Usage
+
+* `icekube enumerate` - Enumerates all resources and saves them into `neo4j` with generic relationships generated (note: not attack path relationships)
+* `icekube attack-path` - Generates attack path relationships within `neo4j`; these are identified by relationships having the property `attack_path` set to `1`
+* `icekube run` - Does both `enumerate` and `attack-path`; this will be the main option for quickly running IceKube against a cluster
+* `icekube purge` - Removes everything from the `neo4j` database
+* Run cypher queries within `neo4j` to discover attack paths and roam around the data; attack relationships will have the property `attack_path: 1`
+
+**NOTE**: In the `neo4j` browser, make sure to disable `Connect result nodes` in the Settings tab on the bottom left. This will stop it automatically rendering every possible relationship between nodes, leaving just the path queried for.
+
+### Filtering Resources
+
+It is possible to filter out specific resource types from enumeration. This can be done with the `--ignore` parameter to `enumerate` and `run`, which takes a comma-delimited list of resource types. For example, if you wish to exclude events and componentstatuses, you could run `icekube run --ignore events,componentstatuses` (NOTE: this is the default)
+
+Sensitive data from secrets is not stored in IceKube; data fields of the Secret resource type are deleted on ingestion. It is recommended to include secrets in enumeration if possible, as IceKube can still analyse the secret type and relevant annotations to aid with attack path generation.
+
+## Example Cypher Queries
+
+The following finds the shortest path from a Pod within the namespace `starting` to the ClusterRole `cluster-admin` using `attack_path` relationships.
+
+```cypher
+MATCH p = shortestPath((src:Pod {namespace: 'starting'})-[*]->(dest:ClusterRole {name: 'cluster-admin'})) WHERE ALL (r in relationships(p) WHERE EXISTS (r.attack_path)) RETURN p
+```
+
+The same query, but it also returns the Namespace each resource is within.
+
+```cypher
+MATCH p = shortestPath((src:Pod {namespace: 'starting'})-[*]->(dest:ClusterRole {name: 'cluster-admin'})) WHERE ALL (r in relationships(p) WHERE EXISTS (r.attack_path)) UNWIND nodes(p) AS n MATCH (n)-[r:WITHIN_NAMESPACE]->(ns:Namespace) RETURN p, ns, r
+```
+
+Finds all Pods / ServiceAccounts / Users / Groups that have access to the ClusterRole `cluster-admin` through a ClusterRoleBinding, ensuring it has a cluster-wide scope.
+
+```cypher
+MATCH p = shortestPath((src)-[*]->(cr:ClusterRole {name: 'cluster-admin'})) WHERE ALL (r in relationships(p) WHERE EXISTS (r.attack_path)) AND (src:ServiceAccount OR src:Pod OR src:User OR src:Group) AND all(n in [[x in nodes(p)][-2]] WHERE (n:ClusterRoleBinding)-[:GRANTS_PERMISSION]->(cr)) RETURN p
+```
+
+Finds any node that can get to the ClusterRole `cluster-admin` through a ClusterRoleBinding, ensuring it has a cluster-wide scope.
+
+```cypher
+MATCH p1=((crb:ClusterRoleBinding)-[:GRANTS_PERMISSION]->(cr:ClusterRole {name: 'cluster-admin'})), p = shortestPath((src)-[*]->(crb)) WHERE ALL (r in relationships(p) WHERE r.attack_path = 1) AND (src <> crb) RETURN p, p1
+```
+
+## Acknowledgements
+
+- [BloodHound](https://github.com/BloodHoundAD/BloodHound) - The original project showing the power of graph databases for security
+- [KubeHound](https://github.com/DataDog/KubeHound) - An excellent and similar tool by DataDog, clearly we had similar ideas!
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..85b1505
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,12 @@
+version: "3"
+
+services:
+  neo4j:
+    image: neo4j:4.4
+    environment:
+      NEO4J_AUTH: none
+    ports:
+      - 7474:7474
+      - 7687:7687
+    volumes:
+      - ./data:/data
diff --git a/docs/.gitkeep b/docs/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/docs/ACCESS_POD.md b/docs/ACCESS_POD.md
new file mode 100644
index 0000000..085d565
--- /dev/null
+++ b/docs/ACCESS_POD.md
@@ -0,0 +1,21 @@
+# ACCESS_POD
+
+### Overview
+
+This rule establishes an attack path between a node and the pods hosted upon it. This allows IceKube to consider accessible pods should an attacker break out onto a node.
+
+### Description
+
+An attacker with access to a node can access all pods running on the node.
+
+### Defense
+
+N/A
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src:Node)-[:HOSTS_POD]->(dest:Pod)
+```
+
+The above query finds nodes (`src`) hosting pods (`dest`) through the `HOSTS_POD` relationship.
diff --git a/docs/ACCESS_SECRET.md b/docs/ACCESS_SECRET.md
new file mode 100644
index 0000000..7e5dcb6
--- /dev/null
+++ b/docs/ACCESS_SECRET.md
@@ -0,0 +1,21 @@
+# ACCESS_SECRET
+
+### Overview
+
+This attack path locates subjects which can access a secret. An attacker could use this to gain access to sensitive information, such as credentials.
+
+### Description
+
+Kubernetes secrets typically contain sensitive information, and are a prime target for attackers. This attack path identifies subjects which have the ability to read a secret.
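+
+For illustration, a minimal sketch (names hypothetical) of a namespaced `Role` that would grant such read access once bound to a subject:
+
+```yaml
+apiVersion: rbac.authorization.k8s.io/v1
+kind: Role
+metadata:
+  name: secret-reader   # hypothetical name
+  namespace: example    # hypothetical namespace
+rules:
+  - apiGroups: [""]     # "" is the core API group, which contains Secrets
+    resources: ["secrets"]
+    verbs: ["get", "list", "watch"]
+```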
+
+### Defense
+
+RBAC permissions regarding reading secrets should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_GET|GRANTS_LIST|GRANTS_WATCH]->(dest:Secret)
+```
+
+The above query finds subjects (`src`) which have read permissions on a secret (`dest`).
diff --git a/docs/AZURE_POD_IDENTITY_EXCEPTION.md b/docs/AZURE_POD_IDENTITY_EXCEPTION.md
new file mode 100644
index 0000000..d82d090
--- /dev/null
+++ b/docs/AZURE_POD_IDENTITY_EXCEPTION.md
@@ -0,0 +1,50 @@
+# AZURE_POD_IDENTITY_EXCEPTION
+
+### Overview
+
+This attack path aims to locate subjects which can access the cluster's Azure managed identity, allowing them to retrieve cluster administrator credentials in cases where Kubernetes local accounts are enabled.
+
+### Description
+
+`AzurePodIdentityException` creates exceptions for pods to remove IPTables filtering of their access to the Instance Metadata Service (IMDS). If a pod is exempt from this filtering, it can communicate with IMDS to retrieve the cluster's Node Managed Identity (NMI) and authenticate as it. Once authenticated, this can be used to gain cluster administrator access in clusters where Kubernetes local accounts are enabled.
+
+An attacker has multiple avenues for leveraging `AzurePodIdentityException`. The first would be reviewing the pod labels from an existing `AzurePodIdentityException`, and creating or modifying workloads to meet those criteria within the same namespace. The resultant pods would have access to IMDS, and could be configured with malicious code that allows an attacker to gain a foothold within them to leverage the access.
+
+Another option would be to create a new `AzurePodIdentityException` within the same namespace as a compromised pod. This exception would need to specify the labels of the compromised workload. This would remove any filtering from the workload, allowing it to once again access IMDS.
+
+### Defense
+
+RBAC permissions regarding `AzurePodIdentityExceptions` should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+
+#### Create workload based on an existing APIE
+
+```cypher
+MATCH (src)-[:GRANTS_GET|GRANTS_LIST|GRANTS_WATCH]->(azexc:AzurePodIdentityException)-[:WITHIN_NAMESPACE]->(ns:Namespace), (dest:ClusterRoleBinding)
+WHERE (dest.name = 'aks-cluster-admin-binding' OR dest.name = 'aks-cluster-admin-binding-aad') AND (EXISTS {
+    MATCH (src)-[:GRANTS_REPLICATIONCONTROLLERS_CREATE|GRANTS_DAEMONSETS_CREATE|GRANTS_DEPLOYMENTS_CREATE|GRANTS_REPLICASETS_CREATE|GRANTS_STATEFULSETS_CREATE|GRANTS_CRONJOBS_CREATE|GRANTS_JOBS_CREATE|GRANTS_PODS_CREATE]->(ns)
+} OR EXISTS {
+    MATCH (src)-[:GRANTS_PATCH|GRANTS_UPDATE]->(workload)-[:WITHIN_NAMESPACE]->(ns)
+    WHERE (workload:ReplicationController OR workload:DaemonSet OR workload:Deployment OR workload:ReplicaSet OR workload:StatefulSet OR workload:CronJob OR workload:Job)
+})
+```
+
+The above query finds subjects (`src`) which can view the `AzurePodIdentityException` configuration. It then checks that the same subject can create or update workloads in the same namespace as the `AzurePodIdentityException`. The target is set as the default AKS cluster admin role bindings.
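+
+For reference, a minimal sketch of an `AzurePodIdentityException` whose `podLabels` an attacker would aim to match (assuming the `aadpodidentity.k8s.io/v1` schema from AAD Pod Identity; all names and labels hypothetical):
+
+```yaml
+apiVersion: aadpodidentity.k8s.io/v1
+kind: AzurePodIdentityException
+metadata:
+  name: example-exception   # hypothetical
+  namespace: example        # hypothetical
+spec:
+  podLabels:                # pods carrying these labels bypass NMI/IMDS filtering
+    app: example-app
+```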
+
+
+#### Create APIE based on an existing workload
+
+```cypher
+MATCH (src)-[:GRANTS_GET|GRANTS_LIST|GRANTS_WATCH]->(pod:Pod)-[:WITHIN_NAMESPACE]->(ns:Namespace), (src)-[r {
+    attack_path: 1
+}]->(pod), (dest:ClusterRoleBinding)
+WHERE (dest.name='aks-cluster-admin-binding' OR dest.name='aks-cluster-admin-binding-aad') AND (EXISTS {
+    (src)-[:GRANTS_AZUREPODIDENTITYEXCEPTIONS_CREATE]->(ns)
+} OR EXISTS {
+    (src)-[:GRANTS_UPDATE|GRANTS_PATCH]->(:AzurePodIdentityException)-[:WITHIN_NAMESPACE]->(ns)
+})
+```
+
+The above query finds subjects (`src`) which can get pods, and which have an attack path to that pod. It then ensures the subject can create or update an `AzurePodIdentityException` within the same namespace. The target is set as the default AKS cluster admin cluster role bindings.
diff --git a/docs/BOUND_TO.md b/docs/BOUND_TO.md
new file mode 100644
index 0000000..4657033
--- /dev/null
+++ b/docs/BOUND_TO.md
@@ -0,0 +1,21 @@
+# BOUND_TO
+
+### Overview
+
+This rule establishes an attack path relationship between a role binding and its subjects. This allows IceKube to consider permissions associated with the role binding for the required subjects.
+
+### Description
+
+Role bindings bind a number of subjects to a role. The permissions granted to the subjects will be those of the bound role. If both the role binding and role are scoped cluster-wide, the permissions are also granted cluster-wide.
+
+### Defense
+
+Review subjects in role bindings, and ensure subjects are only bound to roles that grant the minimal set of permissions required for use.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:BOUND_TO]->(dest)
+```
+
+Finds all resources (`src`) that have a `BOUND_TO` relationship to other resources (`dest`). The `BOUND_TO` relationship only exists between a subject and a role binding, thereby limiting `dest` to `RoleBinding` or `ClusterRoleBinding` and `src` to one of `Group`, `User`, or `ServiceAccount`.
diff --git a/docs/CAN_ACCESS_DANGEROUS_HOST_PATH.md b/docs/CAN_ACCESS_DANGEROUS_HOST_PATH.md
new file mode 100644
index 0000000..f05daef
--- /dev/null
+++ b/docs/CAN_ACCESS_DANGEROUS_HOST_PATH.md
@@ -0,0 +1,27 @@
+# CAN_ACCESS_DANGEROUS_HOST_PATH
+
+### Overview
+
+This attack path aims to locate pods which have potentially dangerous paths from the underlying node's file system mounted. These could be used to gain a foothold on the underlying node or to gain node credentials.
+
+### Description
+
+Pods can mount paths from the underlying host using `hostPath` volumes. Access to certain paths on the host could be considered dangerous as it may grant access to sensitive resources on the host. This could include the kubelet credentials, root's home directory, the container socket, etc.
+
+An attacker with access to these resources could potentially gain access to the underlying host, or gain access to sensitive credentials.
+
+### Defense
+
+Pod Security Admission (PSA) should be configured to enforce the `restricted` standard. Should this be too restrictive, `baseline` could be used instead.
+
+PSA can be limited in its flexibility, for example having a policy that slightly deviates from the `restricted` standard. Should further flexibility be required compared to what PSA can provide, custom admission webhooks should be used to enforce pod security.
+
+Should host path volumes be required, the volumes should be reviewed to ensure they do not expose sensitive files from the host.
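+
+For illustration, a minimal sketch (names hypothetical) of the kind of `hostPath` volume this rule considers dangerous:
+
+```yaml
+apiVersion: v1
+kind: Pod
+metadata:
+  name: host-mount          # hypothetical
+spec:
+  containers:
+    - name: app
+      image: example/app    # hypothetical image
+      volumeMounts:
+        - name: kubelet-dir
+          mountPath: /host/kubelet
+  volumes:
+    - name: kubelet-dir
+      hostPath:
+        path: /var/lib/kubelet   # kubelet state on the node, including credentials for its pods
+```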
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src:Pod {dangerous_host_path: true})<-[:HOSTS_POD]-(dest:Node)
+```
+
+The above query finds pods (`src`) with the `dangerous_host_path` property set to `true`. This property is set by IceKube if a `hostPath` volume matches one of a number of pre-configured dangerous paths. The node (`dest`) that hosts the pod is then targeted.
diff --git a/docs/CAN_ACCESS_HOST_FD.md b/docs/CAN_ACCESS_HOST_FD.md
new file mode 100644
index 0000000..4061265
--- /dev/null
+++ b/docs/CAN_ACCESS_HOST_FD.md
@@ -0,0 +1,26 @@
+# CAN_ACCESS_HOST_FD
+
+### Overview
+
+This attack path aims to locate pods which have the `DAC_READ_SEARCH` capability, which could allow accessing files on the host filesystem. An attacker could use this to break out onto the underlying node.
+
+### Description
+
+The `DAC_READ_SEARCH` capability grants access to `open_by_handle_at`, which allows opening file descriptors across mount namespaces. `DAC_READ_SEARCH` by itself simply grants read access to the files opened; however, when combined with `DAC_OVERRIDE` (a default capability), it can provide write permissions.
+
+An attacker could use this access to open sensitive files on the underlying host and retrieve credentials, such as those of the host itself or of the kubelet. Should `DAC_OVERRIDE` be present, the access could be used to write authentication material such as an SSH key to an `authorized_keys` file.
+
+### Defense
+
+Pod Security Admission (PSA) should be configured to enforce the `restricted` standard. Should this be too restrictive, `baseline` could be used instead.
+
+PSA can be limited in its flexibility, for example having a policy that slightly deviates from the `restricted` standard. Should further flexibility be required compared to what PSA can provide, custom admission webhooks should be used to enforce pod security.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src:Pod)<-[:HOSTS_POD]-(dest:Node)
+WHERE "DAC_READ_SEARCH" in src.capabilities
+```
+
+The above query finds pods (`src`) where `DAC_READ_SEARCH` is in their `capabilities` property. This property is populated by IceKube with the capabilities calculated from the pod spec. The node (`dest`) that hosts the pod is then targeted.
diff --git a/docs/CAN_CGROUP_BREAKOUT.md b/docs/CAN_CGROUP_BREAKOUT.md
new file mode 100644
index 0000000..87514c2
--- /dev/null
+++ b/docs/CAN_CGROUP_BREAKOUT.md
@@ -0,0 +1,26 @@
+# CAN_CGROUP_BREAKOUT
+
+### Overview
+
+This attack path aims to locate pods which have the `SYS_ADMIN` capability, and as such can break out onto the underlying node.
+
+### Description
+
+The `SYS_ADMIN` capability provides the ability to perform a wide range of administrative operations. One of these is the ability to configure a release agent for a cgroup. This agent is triggered once the last task of the cgroup exits, and is run as root on the underlying host.
+
+An attacker with this capability could utilise cgroups to execute commands on the underlying node, thereby breaking out of the current container.
+
+### Defense
+
+Pod Security Admission (PSA) should be configured to enforce the `restricted` standard. Should this be too restrictive, `baseline` could be used instead.
+
+PSA can be limited in its flexibility, for example having a policy that slightly deviates from the `restricted` standard. Should further flexibility be required compared to what PSA can provide, custom admission webhooks should be used to enforce pod security.
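+
+For illustration, a minimal sketch (names hypothetical) of a pod spec granting the `SYS_ADMIN` capability that this rule would flag:
+
+```yaml
+apiVersion: v1
+kind: Pod
+metadata:
+  name: sysadmin-pod        # hypothetical
+spec:
+  containers:
+    - name: app
+      image: example/app    # hypothetical image
+      securityContext:
+        capabilities:
+          add: ["SYS_ADMIN"]   # enables cgroup administration, and with it the release-agent breakout
+```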
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src:Pod)<-[:HOSTS_POD]-(dest:Node)
+WHERE "SYS_ADMIN" in src.capabilities
+```
+
+The above query finds pods (`src`) where `SYS_ADMIN` is in their `capabilities` property. This property is populated by IceKube with the capabilities calculated from the pod spec. The node (`dest`) that hosts the pod is then targeted.
diff --git a/docs/CAN_EXEC_THROUGH_KUBELET.md b/docs/CAN_EXEC_THROUGH_KUBELET.md
new file mode 100644
index 0000000..0dfceec
--- /dev/null
+++ b/docs/CAN_EXEC_THROUGH_KUBELET.md
@@ -0,0 +1,24 @@
+# CAN_EXEC_THROUGH_KUBELET
+
+### Overview
+
+This attack path aims to locate subjects which can execute commands on pods directly through the kubelet. This could allow an attacker to get a foothold on those pods.
+
+### Description
+
+The kubelet runs its own API with its own set of access controls. Authorisation for these endpoints is based on allowed verbs against sub resources of the `nodes` resource. For example, a GET request to `/stats/*` requires the `get` verb on `nodes/stats`. Tables showing the required request verb and sub resource for a particular endpoint can be found in the [Kubernetes documentation](https://kubernetes.io/docs/reference/access-authn-authz/kubelet-authn-authz/#kubelet-authorization).
+The `/exec` path allows for the execution of commands in containers. This path is authorised by the `nodes/proxy` sub resource, and the required verb is `create`.
+
+An attacker with `create` on `nodes/proxy` for a particular node can execute commands on containers running on that node, potentially gaining a foothold within those containers.
+
+### Defense
+
+RBAC permissions regarding the `nodes/proxy` sub resource should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_PROXY_CREATE]->(:Node)-[:HOSTS_POD]->(dest:Pod)
+```
+
+The above query finds subjects (`src`) with the `create` permission on the `nodes/proxy` sub resource. The target is set as pods (`dest`) running on that particular node, determined through the `HOSTS_POD` relationship.
diff --git a/docs/CAN_IMPERSONATE.md b/docs/CAN_IMPERSONATE.md
new file mode 100644
index 0000000..8f93115
--- /dev/null
+++ b/docs/CAN_IMPERSONATE.md
@@ -0,0 +1,23 @@
+# CAN_IMPERSONATE
+
+### Overview
+
+This attack path aims to locate subjects which have the impersonate permission, allowing them to impersonate other subjects.
+
+### Description
+
+Should a subject have the `impersonate` verb on another subject, they can perform requests against the API server specifying the other subject as an impersonation target. The actions are then performed as if carried out by the targeted subject. This could be done with the `--as` flag to `kubectl`.
+
+An attacker could use this to move laterally within the cluster to other subjects.
+
+### Defense
+
+RBAC permissions regarding the impersonate verb should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_IMPERSONATE]->(dest)
+```
+
+The above query finds all resources (`src`) that have the impersonate verb on a target resource (`dest`).
diff --git a/docs/CAN_LOAD_KERNEL_MODULES.md b/docs/CAN_LOAD_KERNEL_MODULES.md
new file mode 100644
index 0000000..38ba9e6
--- /dev/null
+++ b/docs/CAN_LOAD_KERNEL_MODULES.md
@@ -0,0 +1,26 @@
+# CAN_LOAD_KERNEL_MODULES
+
+### Overview
+
+This attack path aims to locate pods which have the `SYS_MODULE` capability, and as such can break out onto the underlying node.
+
+### Description
+
+The `SYS_MODULE` capability allows management of kernel modules, including loading additional modules.
+
+An attacker with this capability could load a custom module containing malicious code. The code would then be executed by the kernel, allowing commands to be run outside of the container and effectively breaking out.
+
+### Defense
+
+Pod Security Admission (PSA) should be configured to enforce the `restricted` standard. Should this be too restrictive, `baseline` could be used instead.
+
+PSA can be limited in its flexibility, for example having a policy that slightly deviates from the `restricted` standard. Should further flexibility be required compared to what PSA can provide, custom admission webhooks should be used to enforce pod security.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src:Pod)<-[:HOSTS_POD]-(dest:Node)
+WHERE "SYS_MODULE" in src.capabilities
+```
+
+The above query finds pods (`src`) where `SYS_MODULE` is in their `capabilities` property. This property is populated by IceKube with the capabilities calculated from the pod spec. The node (`dest`) that hosts the pod is then targeted.
diff --git a/docs/CAN_NSENTER_HOST.md b/docs/CAN_NSENTER_HOST.md
new file mode 100644
index 0000000..1735d39
--- /dev/null
+++ b/docs/CAN_NSENTER_HOST.md
@@ -0,0 +1,26 @@
+# CAN_NSENTER_HOST
+
+### Overview
+
+This attack path aims to locate pods which have both the `SYS_ADMIN` and `SYS_PTRACE` capabilities and share the host's PID namespace, allowing them to break out onto the underlying node.
+
+### Description
+
+An attacker with access to a pod which has both the `SYS_ADMIN` and `SYS_PTRACE` capabilities and shares the host's PID namespace could potentially break out of the pod using the `nsenter` utility.
+
+An example command could be `nsenter -t 1 -a`.
+
+### Defense
+
+Pod Security Admission (PSA) should be configured to enforce the `restricted` standard. Should this be too restrictive, `baseline` could be used instead.
+
+PSA can be limited in its flexibility, for example having a policy that slightly deviates from the `restricted` standard. Should further flexibility be required compared to what PSA can provide, custom admission webhooks should be used to enforce pod security.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src:Pod {hostPID: true})<-[:HOSTS_POD]-(dest:Node)
+WHERE all(x in ["SYS_ADMIN", "SYS_PTRACE"] WHERE x in src.capabilities)
+```
+
+The above query finds pods (`src`) configured with both the `SYS_ADMIN` and `SYS_PTRACE` capabilities and sharing the node's PID namespace. These parameters are configured by IceKube based on the pod spec. The target (`dest`) is set as the node upon which the pod is running.
diff --git a/docs/CREATE_POD_WITH_SA.md b/docs/CREATE_POD_WITH_SA.md
new file mode 100644
index 0000000..bfff55d
--- /dev/null
+++ b/docs/CREATE_POD_WITH_SA.md
@@ -0,0 +1,37 @@
+# CREATE_POD_WITH_SA
+
+### Overview
+
+This attack path aims to locate subjects which can create pods in a namespace containing the target service account. Upon successful exploitation, an attacker will gain the permissions of the target service account.
+
+### Description
+
+An attacker with the ability to create a pod can configure the service account associated with the pod by setting the `serviceAccountName` field in the pod spec. Should the value specified match the name of a service account in the namespace the pod is deployed in, the token for that service account can be mounted into the pod, as illustrated in the sketch below.
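+
+A minimal sketch of such a pod spec (all names hypothetical):
+
+```yaml
+apiVersion: v1
+kind: Pod
+metadata:
+  name: sa-stealer               # hypothetical
+  namespace: example             # namespace containing the target service account
+spec:
+  serviceAccountName: target-sa  # hypothetical target service account
+  containers:
+    - name: app
+      image: example/app         # attacker-controlled image and command
+```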
+
+As the attacker has created the pod, they would also have control of the image and the command executed. This could be configured to exfiltrate the token to the attacker, whether by outputting it to `stdout` (if the attacker has `pods/log` permissions), by exfiltrating the token over the network, or by some other means.
+
+Once the attacker has acquired the token, they would be able to perform actions against the API server as the service account.
+
+### Defense
+
+RBAC permissions to create pods and workloads should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_PODS_CREATE|GRANTS_REPLICATIONCONTROLLERS_CREATE|GRANTS_DAEMONSETS_CREATE|GRANTS_DEPLOYMENTS_CREATE|GRANTS_REPLICASETS_CREATE|GRANTS_STATEFULSETS_CREATE|GRANTS_CRONJOBS_CREATE|GRANTS_JOBS_CREATE]->(ns:Namespace)<-[:WITHIN_NAMESPACE]-(dest:ServiceAccount)
+```
+
+The above query finds all resources (`src`) that have the `CREATE` permission against workload resource types within a specified namespace. `CREATE` permissions on namespaced resources are modelled against the namespace itself. The target node (`dest`) is a service account within the same namespace as where the workload creation is permitted.
+
+Workload creation is used as opposed to solely pods because various Kubernetes controllers create pods automatically from more abstract workload resources. Configuration of the workload resource also configures the created pod, thus it would allow an attacker to create the desired pod.
+
+Workload creation includes the following:
+- `pods`
+- `replicationcontrollers`
+- `daemonsets`
+- `deployments`
+- `replicasets`
+- `statefulsets`
+- `cronjobs`
+- `jobs`
diff --git a/docs/DEBUG_POD.md b/docs/DEBUG_POD.md
new file mode 100644
index 0000000..476754b
--- /dev/null
+++ b/docs/DEBUG_POD.md
@@ -0,0 +1,22 @@
+# DEBUG_POD
+
+### Overview
+
+This attack path aims to locate subjects which can create debug containers within a pod. An attacker could use this to gain a foothold within a pod.
+
+### Description
+
+An attacker with permissions to debug a pod can create a new ephemeral container in the pod. This container can also be configured to share the process namespace of an existing container in the pod. An attacker could use this to gain access to the container's filesystem, including service account tokens, as well as its network stack.
+
+The ability to debug a pod requires the `patch` verb on `pods/ephemeralcontainers` for the targeted pod.
+
+### Defense
+
+RBAC permissions regarding the `patch` permission on the `pods/ephemeralcontainers` sub resource should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_EPHEMERAL_PATCH]->(dest:Pod)
+```
+Finds all resources (`src`) that have a `GRANTS_EPHEMERAL_PATCH` relationship to pods (`dest`).
diff --git a/docs/EXEC_INTO.md b/docs/EXEC_INTO.md
new file mode 100644
index 0000000..c1b1ec1
--- /dev/null
+++ b/docs/EXEC_INTO.md
@@ -0,0 +1,23 @@
+# EXEC_INTO
+
+### Overview
+
+This attack path aims to locate subjects which can execute into pods. An attacker could use this to gain a foothold in a running pod.
+
+### Description
+
+An attacker with the ability to execute commands within a pod could gain access to the data within. This would include access to its processes, filesystem, network position, etc. This could be used as a foothold for further attacks within the cluster.
+
+Executing commands in a pod requires two permissions. The first is `create` on `pods/exec`, and the second is `get` on `pods`. Both permissions must apply to the target pod.
+
+### Defense
+
+RBAC permissions regarding the outlined permissions should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_EXEC_CREATE]->(dest:Pod)<-[:GRANTS_GET]-(src)
+```
+
+The above query finds all resources (`src`) that have `GRANTS_EXEC_CREATE` and `GRANTS_GET` on a Pod (`dest`). The two relationships map to the two required permissions for executing commands within a pod.
diff --git a/docs/GENERATE_CLIENT_CERTIFICATE.md b/docs/GENERATE_CLIENT_CERTIFICATE.md
new file mode 100644
index 0000000..496c384
--- /dev/null
+++ b/docs/GENERATE_CLIENT_CERTIFICATE.md
@@ -0,0 +1,34 @@
+# GENERATE_CLIENT_CERTIFICATE
+
+### Overview
+
+This attack path aims to locate subjects which can create a certificate signing request (CSR) _and_ approve its signing. An attacker can use this to generate credentials for another user, service account, or group.
+
+### Description
+
+The CSR API allows the submission of Certificate Signing Requests (CSRs). Should the CSR be signed by the `kubernetes.io/kube-apiserver-client` signer, the signed certificate can be used as a client certificate for the purpose of authenticating to the cluster. The common name of the certificate specifies the user's username, and the organisations specify their groups.
+
+Should an attacker have the ability to create CSRs, they could submit certificate requests for other subjects of the cluster. Should they also have the ability to approve the signing with the above signer, signed certificates for the specified subjects would be generated.
+
+An attacker could use these to escalate their privileges within the cluster.
+
+### Defense
+
+RBAC permissions regarding the creation and approval of CSRs should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_CERTIFICATESIGNINGREQUESTS_CREATE]->(cluster:Cluster), (dest)
+WHERE (src)-[:HAS_CSR_APPROVAL]->(cluster) AND (src)-[:GRANTS_APPROVE]->(:Signer {
+    name: "kubernetes.io/kube-apiserver-client"
+}) AND (dest:User OR dest:Group OR dest:ServiceAccount)
+```
+
+The above query ensures a resource (`src`) has the following three permissions:
+
+- Ability to create CSRs through `GRANTS_CERTIFICATESIGNINGREQUESTS_CREATE`
+- Ability to approve CSRs through `HAS_CSR_APPROVAL`
+- Approval to use the `kubernetes.io/kube-apiserver-client` signer through `GRANTS_APPROVE`
+
+Should all three conditions be met, subjects (`dest`) are targeted if they are a `User`, `Group`, or `ServiceAccount`.
diff --git a/docs/GENERATE_TOKEN.md b/docs/GENERATE_TOKEN.md
new file mode 100644
index 0000000..4915aab
--- /dev/null
+++ b/docs/GENERATE_TOKEN.md
@@ -0,0 +1,23 @@
+# GENERATE_TOKEN
+
+### Overview
+
+This attack path aims to locate resources which can generate a token for a given service account. Upon successful exploitation, an attacker will gain the permissions of the target service account.
+
+### Description
+
+Short-lived service account tokens can be generated by the API server. This requires the `create` verb on `serviceaccounts/token` for the targeted service account.
+
+Upon generation of a token, it can be used to perform actions against the API server as the service account.
+
+### Defense
+
+RBAC access to token creation should be reviewed, and access restricted to required subjects.
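+
+For reference, token generation goes through the `TokenRequest` API; a minimal sketch of the body POSTed to the service account's `token` subresource (fields assumed from the `authentication.k8s.io/v1` API):
+
+```yaml
+apiVersion: authentication.k8s.io/v1
+kind: TokenRequest
+spec:
+  audiences: []            # empty defaults to the API server's audience
+  expirationSeconds: 3600  # requested token lifetime
+```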
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_TOKEN_CREATE]->(dest:ServiceAccount)
+```
+
+The above query finds all resources (`src`) that have the permission to create a token for a given service account. The target (`dest`) is the targeted service account.
diff --git a/docs/GET_AUTHENTICATION_TOKEN_FOR.md b/docs/GET_AUTHENTICATION_TOKEN_FOR.md
new file mode 100644
index 0000000..a1b2c9f
--- /dev/null
+++ b/docs/GET_AUTHENTICATION_TOKEN_FOR.md
@@ -0,0 +1,23 @@
+# GET_AUTHENTICATION_TOKEN_FOR
+
+### Overview
+
+This attack path aims to locate resources which can get a long-lived service account token for a given service account. Upon successful exploitation, an attacker will gain the permissions of the target service account.
+
+### Description
+
+Kubernetes secrets can contain long-lived tokens for service accounts. These are secrets whose type is set to `kubernetes.io/service-account-token`. Should this be set, the `kubernetes.io/service-account.name` annotation determines which service account the token is created for; a Kubernetes controller then automatically populates the secret with a token for that service account.
+
+An attacker with read access to this secret would be able to use the token to perform actions against the API server as the service account.
+
+### Defense
+
+Long-lived service account tokens should be avoided in favour of short-lived tokens using the `TokenRequest` API. Should this not be possible, RBAC permissions should be reviewed to limit read access to such secrets to those who require it.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_GET|GRANTS_LIST|GRANTS_WATCH]->(secret:Secret)-[:AUTHENTICATION_TOKEN_FOR]->(dest:ServiceAccount)
+```
+
+The above query finds all resources (`src`) that have either the GET, LIST, or WATCH permission on a secret containing a token. The service account that the token is for is the target (`dest`).
diff --git a/docs/GRANTS_PERMISSION.md b/docs/GRANTS_PERMISSION.md
new file mode 100644
index 0000000..cc8338b
--- /dev/null
+++ b/docs/GRANTS_PERMISSION.md
@@ -0,0 +1,21 @@
+# GRANTS_PERMISSION
+
+### Overview
+
+This rule establishes an attack path relationship between a role binding and its role. This allows IceKube to consider role permissions for the associated role binding and, by extension, its subjects.
+
+### Description
+
+Role bindings bind a number of subjects to a role. The permissions granted to the subjects will be those of the bound role. If both the role binding and role are scoped cluster-wide, the permissions are also granted cluster-wide.
+
+### Defense
+
+Review the associated role for each role binding, and ensure only roles that grant the minimal set of permissions required are attached.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_PERMISSION]->(dest)
+```
+
+Finds all resources (`src`) that have a `GRANTS_PERMISSION` relationship to other resources (`dest`). The `GRANTS_PERMISSION` relationship only exists between role bindings and roles, thereby limiting `src` to `RoleBinding` or `ClusterRoleBinding` and `dest` to `Role` or `ClusterRole`.
diff --git a/docs/IS_PRIVILEGED.md b/docs/IS_PRIVILEGED.md
new file mode 100644
index 0000000..45d680c
--- /dev/null
+++ b/docs/IS_PRIVILEGED.md
@@ -0,0 +1,25 @@
+# IS_PRIVILEGED
+
+### Overview
+
+This attack path aims to locate pods which are privileged, and as such could break out onto the underlying node.
+
+### Description
+
+Privileged pods run their containers without much of the segregation typical containers have. This makes it significantly easier for a container breakout to occur, granting an attacker a foothold on the underlying node.
+
+A number of techniques are available to break out of a privileged pod. For example, mounting the underlying drives from `/dev/` and accessing the host's filesystem.
+
+### Defense
+
+Pod Security Admission (PSA) should be configured to enforce the `restricted` standard. Should this be too restrictive, `baseline` could be used instead.
+
+PSA can be limited in its flexibility, for example having a policy that slightly deviates from the `restricted` standard. Should further flexibility be required compared to what PSA can provide, custom admission webhooks should be used to enforce pod security.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src:Pod {privileged: true})<-[:HOSTS_POD]-(dest:Node)
+```
+
+The query above finds pods (`src`) that have the `privileged` property set to `true`. This property is configured by IceKube and is retrieved from the pod spec. The node (`dest`) that hosts the pod is then targeted.
diff --git a/docs/MOUNTS_SECRET.md b/docs/MOUNTS_SECRET.md
new file mode 100644
index 0000000..60c3160
--- /dev/null
+++ b/docs/MOUNTS_SECRET.md
@@ -0,0 +1,21 @@
+# MOUNTS_SECRET
+
+### Overview
+
+This attack path locates pods with mounted secrets. An attacker with a foothold on one of these pods would be able to access the values in the secret.
+
+### Description
+
+Kubernetes secrets typically contain sensitive information, and are a prime target for attackers. This attack path identifies pods which have this data mounted.
+
+### Defense
+
+Review which secrets are mounted into a pod, and ensure all secrets are required.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src:Pod)-[:MOUNTS_SECRET]->(dest:Secret)
+```
+
+The above query finds pods (`src`) which have secrets (`dest`) mounted.
diff --git a/docs/RBAC_ESCALATE_TO.md b/docs/RBAC_ESCALATE_TO.md
new file mode 100644
index 0000000..543536a
--- /dev/null
+++ b/docs/RBAC_ESCALATE_TO.md
@@ -0,0 +1,35 @@
+# RBAC_ESCALATE_TO
+
+### Overview
+
+This attack path aims to locate subjects which can escalate their privileges within the cluster by modifying bound roles.
+
+### Description
+
+By default, a subject is unable to grant more permissions in RBAC than they originally have access to. The `escalate` verb is a special verb that bypasses this restriction. It permits the modification of roles to add more permissions than the editor may have.
+
+This could be used by an attacker to modify a role that grants them permissions to include additional permissions they do not currently hold, thereby escalating their privileges.
+
+### Defense
+
+RBAC permissions regarding the `escalate` permission on roles should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+#### RoleBindings
+
+```cypher
+MATCH (src:RoleBinding)-[:GRANTS_ESCALATE]->(role)-[:WITHIN_NAMESPACE]->(:Namespace)<-[:WITHIN_NAMESPACE]-(dest)
+WHERE (role:Role OR role:ClusterRole) AND (src)-[:GRANTS_PERMISSION]->(role)
+```
+
+The above query finds role bindings (`src`) that have escalate permissions on a role. The role can either be a `Role` or a `ClusterRole`. The role binding must also be bound to the role through the `GRANTS_PERMISSION` relationship. Finally, the namespace for the role is retrieved, and all resources within that namespace are targeted (`dest`).
+
+#### ClusterRoleBindings
+
+```cypher
+MATCH (src:ClusterRoleBinding)-[:GRANTS_ESCALATE]->(role:ClusterRole), (dest)
+WHERE (src)-[:GRANTS_PERMISSION]->(role)
+```
+
+The above query finds cluster role bindings (`src`) that have escalate permissions on a cluster role. The role binding must also be bound to the role through the `GRANTS_PERMISSION` relationship. Finally, all resources within the database are targeted (`dest`).
diff --git a/docs/REPLACE_IMAGE.md b/docs/REPLACE_IMAGE.md
new file mode 100644
index 0000000..24a0040
--- /dev/null
+++ b/docs/REPLACE_IMAGE.md
@@ -0,0 +1,21 @@
+# REPLACE_IMAGE
+
+### Overview
+
+This attack path aims to locate subjects which have the ability to modify pods. An attacker can use this to replace a pod's image, which could be used to inject malicious code and gain a foothold within the pod.
+
+### Description
+
+An attacker with permissions to patch a pod could replace the pod's image with a malicious one. This malicious image could include code that aids an attacker in getting a foothold within the pod. For example, it may connect to an attacker-controlled server with a reverse shell.
+
+### Defense
+
+RBAC permissions regarding the patch permission should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_PATCH]->(dest:Pod)
+```
+
+Finds all resources (`src`) that have a `GRANTS_PATCH` relationship to pods (`dest`).
diff --git a/docs/UPDATE_AWS_AUTH.md b/docs/UPDATE_AWS_AUTH.md
new file mode 100644
index 0000000..f861507
--- /dev/null
+++ b/docs/UPDATE_AWS_AUTH.md
@@ -0,0 +1,40 @@
+# UPDATE_AWS_AUTH
+
+### Overview
+
+This attack path is specific to AWS EKS, and aims to locate subjects which can make changes to the `aws-auth` ConfigMap. Upon successful exploitation, an attacker is considered to have reached the `system:masters` group.
+
+### Description
+
+In AWS EKS, the `aws-auth` ConfigMap is used to map AWS IAM roles to Kubernetes RBAC users and groups. As such, it allows the API server to enforce authorisation on AWS entities when accessing the cluster. Once an IAM identity is added to the ConfigMap, it will be able to access the cluster using the Kubernetes API, with its permissions depending on the mapping created.
+
+An attacker with privileges that allow them to modify the `aws-auth` ConfigMap could add their own IAM roles to this configuration, granting their own role permissions within the cluster, including the `system:masters` group.
+
+An example addition to the ConfigMap can be seen below for the `mapRoles` section:
+
+```yaml
+- groups:
+  - system:masters
+  rolearn: ATTACKER_CONTROLLED_ARN
+  username: user
+```
+
+An EKS token can then be manually generated, or the kubeconfig file can be updated to automatically request a token for the configured role ARN, which can be used to authenticate against the cluster.
+
+### Defense
+
+RBAC write access to the `aws-auth` ConfigMap within the `kube-system` namespace should be reviewed. Access should be restricted to required entities.
+
+### Cypher Deep-Dive
+
+```cypher
+MATCH (src)-[:GRANTS_PATCH|GRANTS_UPDATE]->(:ConfigMap {
+    name: 'aws-auth', namespace: 'kube-system'
+}), (dest:Group {
+    name: 'system:masters'
+})
+```
+
+The above query finds all resources (`src`) that have the `PATCH` or `UPDATE` permission against the `aws-auth` ConfigMap. Both the namespace and name are used to specify the exact ConfigMap in case another version is present in a different namespace.
+ +All queries must have a `src` and `dest` so IceKube knows the two sides of a relationship, and this is one of the rare instances where the original query doesn't include the destination resource. A secondary query is therefore added to match the `system:masters` group and specify that as the `dest`. diff --git a/docs/UPDATE_WORKLOAD_WITH_SA.md b/docs/UPDATE_WORKLOAD_WITH_SA.md new file mode 100644 index 0000000..e00a6e9 --- /dev/null +++ b/docs/UPDATE_WORKLOAD_WITH_SA.md @@ -0,0 +1,37 @@ +# UPDATE_WORKLOAD_WITH_SA + +### Overview + +This attack path aims to locate subjects which can update workloads in a namespace with the target service account. Upon successful exploitation, an attacker will gain the permissions of the target service account. + +### Description + +An attacker with the ability to update workloads could configure the service account associated with the resultant pod by setting the `serviceAccountName` field in the pod spec. Should the value specified match the name of a service account in the namespace the pod is deployed in, the token for that service account can be mounted into the pod. + +As the attacker has configured the workload, they would also have control of the image and the command executed. This could be configured to exfiltrate the token to the attacker. This could be done by outputting it to `stdout` if the attacker has `pods/log` permissions, by exfiltrating the token over the network, or by some other means. + +Once the attacker has acquired the token, they would be able to perform actions against the API server as the service account. + +### Defense + +RBAC permissions to update workloads should be reviewed. Access should be restricted to required entities. + +### Cypher Deep-Dive + +```cypher +MATCH (src)-[:GRANTS_UPDATE|GRANTS_PATCH]->(workload)-[:WITHIN_NAMESPACE]->(ns:Namespace)<-[:WITHIN_NAMESPACE]-(dest:ServiceAccount) +WHERE (workload:ReplicationController OR workload:DaemonSet OR workload:Deployment OR workload:ReplicaSet OR workload:StatefulSet OR workload:CronJob OR workload:Job) +``` + +The above query finds all resources (`src`) that have the `PATCH` or `UPDATE` permission against workload resource types within a specified namespace; as these are namespaced resources, the verbs are granted within the scope of that namespace. The target node (`dest`) is a service account within the same namespace as the workloads that can be modified. + +Workload resources are targeted because various Kubernetes controllers create pods automatically from these more abstract resources. Configuration of the workload resource also configures the created pod, thus allowing an attacker to create the desired pod. + +The workload resources considered are the following: +- `replicationcontrollers` +- `daemonsets` +- `deployments` +- `replicasets` +- `statefulsets` +- `cronjobs` +- `jobs` diff --git a/docs/USES_ACCOUNT.md b/docs/USES_ACCOUNT.md new file mode 100644 index 0000000..dd1d8ea --- /dev/null +++ b/docs/USES_ACCOUNT.md @@ -0,0 +1,42 @@ +# USES_ACCOUNT + +### Overview + +This attack path aims to locate pods which mount a service account token. Upon compromise of a pod, an attacker would gain access to the token allowing them to perform actions as the associated service account. + +### Description + +An attacker who gains access to a pod which uses a service account could leverage that service account's permissions, furthering their access within the cluster. + +Pods are associated with a service account. 
Should the service account token be mounted, it can typically be found at `/var/run/secrets/kubernetes.io/serviceaccount/token`. Upon compromise, an attacker can access the token and use it to communicate with the API server. This would allow them to perform actions as the service account. + +### Defense + +Service account tokens should only be mounted into a pod when required. By default, tokens are mounted, so this needs to be explicitly disabled. This can be done by setting `automountServiceAccountToken` to `false` in the pod spec, or within the service account. Examples for both can be seen below: + +```yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + name: default +automountServiceAccountToken: false +... +``` + +```yaml +apiVersion: v1 +kind: Pod +metadata: + name: my-pod +spec: + automountServiceAccountToken: false + ... +``` + +### Cypher Deep-Dive + +```cypher +MATCH (src:Pod)-[:USES_ACCOUNT]->(dest:ServiceAccount) +``` + +The above query finds all `Pod` resources (`src`) and finds the configured `ServiceAccount` node (`dest`) by means of the `USES_ACCOUNT` relationship. diff --git a/docs/logo.png b/docs/logo.png new file mode 100644 index 0000000..5c0e30b Binary files /dev/null and b/docs/logo.png differ diff --git a/icekube/__init__.py b/icekube/__init__.py new file mode 100644 index 0000000..edde1a9 --- /dev/null +++ b/icekube/__init__.py @@ -0,0 +1,3 @@ +import urllib3 + +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) diff --git a/icekube/attack_paths.py b/icekube/attack_paths.py new file mode 100644 index 0000000..4ea5364 --- /dev/null +++ b/icekube/attack_paths.py @@ -0,0 +1,133 @@ +# flake8: noqa + +from typing import List + +WORKLOAD_TYPES = [ + "ReplicationController", + "DaemonSet", + "Deployment", + "ReplicaSet", + "StatefulSet", + "CronJob", + "Job", +] + + +def create_workload_query(workloads: List[str] = WORKLOAD_TYPES) -> str: + relationships = [f"GRANTS_{workload.upper()}S_CREATE" for workload in workloads] + return "|".join(relationships) + + +def workload_query( + workloads: List[str] = WORKLOAD_TYPES, name: str = "workload" +) -> str: + joined = f" OR {name}:".join(workloads) + return f"({name}:{joined})" + + +attack_paths = { + # Subject -> Role Bindings + "BOUND_TO": "MATCH (src)-[:BOUND_TO]->(dest)", + # Role Binding -> Role + "GRANTS_PERMISSION": "MATCH (src)-[:GRANTS_PERMISSION]->(dest)", + # Pod -> Service Account + "USES_ACCOUNT": "MATCH (src:Pod)-[:USES_ACCOUNT]->(dest:ServiceAccount)", + # Pod -> Secret + "MOUNTS_SECRET": "MATCH (src:Pod)-[:MOUNTS_SECRET]->(dest:Secret)", + # Subject has permission to create pod within namespace with target + # Service Account + "CREATE_POD_WITH_SA": f""" + MATCH (src)-[:GRANTS_PODS_CREATE|{create_workload_query()}]->(ns:Namespace)<-[:WITHIN_NAMESPACE]-(dest:ServiceAccount) + """, + # Subject has permission to update workload within namespace with target + # Service Account + "UPDATE_WORKLOAD_WITH_SA": f""" + MATCH (src)-[:GRANTS_UPDATE|GRANTS_PATCH]->(workload)-[:WITHIN_NAMESPACE]->(ns:Namespace)<-[:WITHIN_NAMESPACE]-(dest:ServiceAccount) + WHERE {workload_query()} + """, + # Subject -> Pod + "EXEC_INTO": "MATCH (src)-[:GRANTS_EXEC_CREATE]->(dest:Pod)<-[:GRANTS_GET]-(src)", + # Subject -> Pod + "REPLACE_IMAGE": "MATCH (src)-[:GRANTS_PATCH]->(dest:Pod)", + # Subject -> Pod + "DEBUG_POD": "MATCH (src)-[:GRANTS_EPHEMERAL_PATCH]->(dest:Pod)", + # Subject has permission to read authentication token for Service Account + "GET_AUTHENTICATION_TOKEN_FOR": """ + MATCH 
(src)-[:GRANTS_GET|GRANTS_LIST|GRANTS_WATCH]->(secret:Secret)-[:AUTHENTICATION_TOKEN_FOR]->(dest:ServiceAccount) + """, + # Subject -> Secret + "ACCESS_SECRET": "MATCH (src)-[:GRANTS_GET|GRANTS_LIST|GRANTS_WATCH]->(dest:Secret)", + # Create SA long lived token + # TODO: Need to account for then having permission to read secret + # "CREATE_SA_TOKEN_VIA_SECRET": "MATCH (src)-[:GRANTS_SECRET_CREATE]->" + # "(ns:Namespace)" + # "<-[:WITHIN_NAMESPACE]-(dest:ServiceAccount)", + # Generate service account token + "GENERATE_TOKEN": "MATCH (src)-[:GRANTS_TOKEN_CREATE]->(dest:ServiceAccount)", + # RBAC escalate verb to change a role to be more permissive + # TODO: expand to roles for other entities + "RBAC_ESCALATE_TO": [ + # RoleBindings + """ + MATCH (src:RoleBinding)-[:GRANTS_ESCALATE]->(role)-[:WITHIN_NAMESPACE]->(:Namespace)<-[:WITHIN_NAMESPACE]-(dest) + WHERE (role:Role OR role:ClusterRole) AND (src)-[:GRANTS_PERMISSION]->(role) + """, + # ClusterRoleBindings + """ + MATCH (src:ClusterRoleBinding)-[:GRANTS_ESCALATE]->(role:ClusterRole), (dest) + WHERE (src)-[:GRANTS_PERMISSION]->(role) + """, + ], + # TODO: RBAC Bind + # Subject -> User / Group / ServiceAccount + "GENERATE_CLIENT_CERTIFICATE": """ + MATCH (src)-[:GRANTS_CERTIFICATESIGNINGREQUESTS_CREATE]->(cluster:Cluster), (dest) + WHERE (src)-[:HAS_CSR_APPROVAL]->(cluster) AND (src)-[:GRANTS_APPROVE]->(:Signer { + name: "kubernetes.io/kube-apiserver-client" + }) AND (dest:User OR dest:Group OR dest:ServiceAccount) + """, + # Impersonate + "CAN_IMPERSONATE": "MATCH (src)-[:GRANTS_IMPERSONATE]->(dest)", + # Pod breakout + "IS_PRIVILEGED": "MATCH (src:Pod {privileged: true})<-[:HOSTS_POD]-(dest:Node)", + "CAN_CGROUP_BREAKOUT": 'MATCH (src:Pod)<-[:HOSTS_POD]-(dest:Node) WHERE "SYS_ADMIN" in src.capabilities', + "CAN_LOAD_KERNEL_MODULES": 'MATCH (src:Pod)<-[:HOSTS_POD]-(dest:Node) WHERE "SYS_MODULE" in src.capabilities', + "CAN_ACCESS_DANGEROUS_HOST_PATH": "MATCH (src:Pod {dangerous_host_path: true})<-[:HOSTS_POD]-(dest:Node)", + "CAN_NSENTER_HOST": 'MATCH (src:Pod {hostPID: true})<-[:HOSTS_POD]-(dest:Node) WHERE all(x in ["SYS_ADMIN", "SYS_PTRACE"] WHERE x in src.capabilities)', + "CAN_ACCESS_HOST_FD": 'MATCH (src:Pod)<-[:HOSTS_POD]-(dest:Node) WHERE "DAC_READ_SEARCH" in src.capabilities', + # Can jump to pods running on node + "ACCESS_POD": "MATCH (src:Node)-[:HOSTS_POD]->(dest:Pod)", + # Can exec into pods on a node + "CAN_EXEC_THROUGH_KUBELET": "MATCH (src)-[:GRANTS_PROXY_CREATE]->(:Node)-[:HOSTS_POD]->(dest:Pod)", + # Can update aws-auth ConfigMap + "UPDATE_AWS_AUTH": """ + MATCH (src)-[:GRANTS_PATCH|GRANTS_UPDATE]->(:ConfigMap { + name: 'aws-auth', namespace: 'kube-system' + }), (dest:Group { + name: 'system:masters' + }) + """, + "AZURE_POD_IDENTITY_EXCEPTION": [ + # Create workload based on existing APIE + f""" + MATCH (src)-[:GRANTS_GET|GRANTS_LIST|GRANTS_WATCH]->(azexc:AzurePodIdentityException)-[:WITHIN_NAMESPACE]->(ns:Namespace), (dest:ClusterRoleBinding) + WHERE (dest.name = 'aks-cluster-admin-binding' OR dest.name = 'aks-cluster-admin-binding-aad') AND (EXISTS {{ + MATCH (src)-[:{create_workload_query()}|GRANTS_PODS_CREATE]->(ns) + }} OR EXISTS {{ + MATCH (src)-[:GRANTS_PATCH|GRANTS_UPDATE]->(workload)-[:WITHIN_NAMESPACE]->(ns) + WHERE {workload_query()} + }}) + """, + # Create APIE based on existing workload + """ + MATCH (src)-[:GRANTS_GET|GRANTS_LIST|GRANTS_WATCH]->(pod:Pod)-[:WITHIN_NAMESPACE]->(ns:Namespace), (src)-[r { + attack_path: 1 + }]->(pod), (dest:ClusterRoleBinding) + WHERE (dest.name='aks-cluster-admin-binding' OR 
dest.name='aks-cluster-admin-binding-aad') AND (EXISTS { + (src)-[:GRANTS_AZUREPODIDENTITYEXCEPTIONS_CREATE]->(ns) + } OR EXISTS { + (src)-[:GRANTS_UPDATE|GRANTS_PATCH]->(:AzurePodIdentityException)-[:WITHIN_NAMESPACE]->(ns) + }) + """, + ], +} diff --git a/icekube/cli.py b/icekube/cli.py new file mode 100644 index 0000000..26e4930 --- /dev/null +++ b/icekube/cli.py @@ -0,0 +1,173 @@ +import json +import logging +from collections import defaultdict +from pathlib import Path +from typing import Iterator, List, Optional, cast + +import typer +from icekube.config import config +from icekube.icekube import ( + create_indices, + enumerate_resource_kind, + generate_relationships, + purge_neo4j, + remove_attack_paths, + setup_attack_paths, +) +from icekube.kube import ( + APIResource, + Resource, + all_resources, + metadata_download, +) +from icekube.log_config import build_logger +from tqdm import tqdm + +app = typer.Typer() + +IGNORE_DEFAULT = "events,componentstatuses" + + +@app.command() +def run( + ignore: str = typer.Option( + IGNORE_DEFAULT, + help="Names of resource types to ignore", + ), +): + enumerate(ignore) + attack_path() + + +@app.command() +def enumerate( + ignore: str = typer.Option( + IGNORE_DEFAULT, + help="Names of resource types to ignore", + ), +): + create_indices() + enumerate_resource_kind(ignore.split(",")) + generate_relationships() + + +@app.command() +def relationships(): + generate_relationships() + + +@app.command() +def attack_path(): + remove_attack_paths() + setup_attack_paths() + + +@app.command() +def purge(): + purge_neo4j() + + +@app.command() +def download(output_dir: str): + path = Path(output_dir) + path.mkdir(exist_ok=True) + + resources = all_resources() + metadata = metadata_download() + + with open(path / "_metadata.json", "w") as fs: + fs.write(json.dumps(metadata, indent=2, default=str)) + + sorted_resources = defaultdict(list) + + for resource in resources: + if resource.raw: + sorted_resources[resource.plural].append(json.loads(resource.raw)) + + for plural, res in sorted_resources.items(): + with open(path / f"{plural}.json", "w") as fs: + fs.write(json.dumps(res, indent=2, default=str)) + + +@app.command() +def load(input_dir: str, attack_paths: bool = True): + path = Path(input_dir) + metadata = json.load(open(path / "_metadata.json")) + + from icekube import kube + from icekube import icekube + + kube.kube_version = lambda: cast(str, metadata["kube_version"]) + kube.context_name = lambda: cast(str, metadata["context_name"]) + kube.api_versions = lambda: cast(List[str], metadata["api_versions"]) + kube.preferred_versions = metadata["preferred_versions"] + kube.api_resources = lambda: cast( + List[APIResource], + [APIResource(**x) for x in metadata["api_resources"]], + ) + + icekube.api_resources = kube.api_resources + icekube.context_name = kube.context_name + icekube.kube_version = kube.kube_version + + resources = [] + + print("Loading files from disk") + for file in tqdm(path.glob("*")): + if file.name == "_metadata.json": + continue + try: + # If downloaded via kubectl get -A + data = json.load(open(file))["items"] + except TypeError: + # If downloaded via icekube download + data = json.load(open(file)) + + for resource in data: + resources.append( + Resource( + apiVersion=resource["apiVersion"], + kind=resource["kind"], + name=resource["metadata"]["name"], + namespace=resource["metadata"].get("namespace"), + plural=file.name.replace(".json", ""), + raw=json.dumps(resource, default=str), + ) + ) + print("") + + def all_resources( + 
preferred_versions_only: bool = True, + ignore: Optional[List[str]] = None, + ) -> Iterator[Resource]: + yield from resources + + kube.all_resources = all_resources + icekube.all_resources = all_resources + + if attack_paths: + run(IGNORE_DEFAULT) + else: + enumerate(IGNORE_DEFAULT) + + +@app.callback() +def callback( + neo4j_url: str = typer.Option("bolt://localhost:7687", show_default=True), + neo4j_user: str = typer.Option("neo4j", show_default=True), + neo4j_password: str = typer.Option("neo4j", show_default=True), + neo4j_encrypted: bool = typer.Option(False, show_default=True), + verbose: int = typer.Option(0, "--verbose", "-v", count=True), +): + config["neo4j"]["url"] = neo4j_url + config["neo4j"]["username"] = neo4j_user + config["neo4j"]["password"] = neo4j_password + config["neo4j"]["encrypted"] = neo4j_encrypted + + verbosity_levels = { + 0: logging.ERROR, + 1: logging.WARNING, + 2: logging.INFO, + 3: logging.DEBUG, + } + # Cap at DEBUG so -vvvv and beyond do not raise a KeyError + build_logger(verbosity_levels.get(verbose, logging.DEBUG)) diff --git a/icekube/config.py b/icekube/config.py new file mode 100644 index 0000000..6304e12 --- /dev/null +++ b/icekube/config.py @@ -0,0 +1,22 @@ +from typing import TypedDict + + +class Neo4j(TypedDict): + url: str + username: str + password: str + encrypted: bool + + +class Config(TypedDict): + neo4j: Neo4j + + +config: Config = { + "neo4j": { + "url": "bolt://localhost:7687", + "username": "neo4j", + "password": "neo4j", + "encrypted": False, + }, +} diff --git a/icekube/icekube.py b/icekube/icekube.py new file mode 100644 index 0000000..c91f580 --- /dev/null +++ b/icekube/icekube.py @@ -0,0 +1,157 @@ +import logging +from concurrent.futures import ThreadPoolExecutor +from functools import partial +from typing import List, Optional + +from icekube.attack_paths import attack_paths +from icekube.kube import ( + all_resources, + api_resources, + context_name, + kube_version, +) +from icekube.models import Cluster, Signer +from icekube.models.base import Resource +from icekube.neo4j import create, find, get, get_driver +from neo4j import BoltDriver +from tqdm import tqdm + +logger = logging.getLogger(__name__) + + +def create_indices(): + for resource in api_resources(): + if "list" not in resource.verbs: + continue + + kind = resource.kind + namespace = resource.namespaced + + cmd = f"CREATE INDEX {kind.lower()} IF NOT EXISTS " + cmd += f"FOR (n:{kind}) ON (n.name" + if namespace: + cmd += ", n.namespace" + cmd += ")" + + with get_driver().session() as session: + session.run(cmd) + + +def enumerate_resource_kind( + ignore: Optional[List[str]] = None, +) -> List[Resource]: + if ignore is None: + ignore = [] + + resources: List[Resource] = [] + + with get_driver().session() as session: + cluster = Cluster(name=context_name(), version=kube_version()) + cmd, kwargs = create(cluster) + session.run(cmd, **kwargs) + + signers = [ + "kubernetes.io/kube-apiserver-client", + "kubernetes.io/kube-apiserver-client-kubelet", + "kubernetes.io/kubelet-serving", + "kubernetes.io/legacy-unknown", + ] + for signer in signers: + s = Signer(name=signer) + cmd, kwargs = create(s) + session.run(cmd, **kwargs) + + for resource in all_resources(ignore=ignore): + resources.append(resource) + cmd, kwargs = create(resource) + session.run(cmd, **kwargs) + + return resources + + +def relationship_generator( + driver: BoltDriver, + initial: bool, + resource: Resource, +): + with driver.session() as session: + logger.info(f"Generating relationships for {resource}") + for source, relationship, target in resource.relationships(initial): + if 
isinstance(source, Resource): + src_cmd, src_kwargs = get(source, prefix="src") + else: + src_cmd = source[0].format(prefix="src") + src_kwargs = {f"src_{key}": value for key, value in source[1].items()} + + if isinstance(target, Resource): + dst_cmd, dst_kwargs = get(target, prefix="dst") + else: + dst_cmd = target[0].format(prefix="dst") + dst_kwargs = {f"dst_{key}": value for key, value in target[1].items()} + + cmd = src_cmd + "WITH src " + dst_cmd + + if isinstance(relationship, str): + relationship = [relationship] + cmd += "".join(f"MERGE (src)-[:{x}]->(dst) " for x in relationship) + + kwargs = {**src_kwargs, **dst_kwargs} + logger.debug(f"Starting neo4j query: {cmd}, {kwargs}") + session.run(cmd, kwargs) + + +def generate_relationships(threaded: bool = False) -> None: + logger.info("Generating relationships") + logger.info("Fetching resources from neo4j") + driver = get_driver() + resources = find() + logger.info("Fetched resources from neo4j") + generator = partial(relationship_generator, driver, True) + + if threaded: + with ThreadPoolExecutor() as exc: + exc.map(generator, resources) + else: + print("First pass for relationships") + for resource in tqdm(resources): + generator(resource) + print("") + + # Do a second loop across relationships to handle objects created as part + # of other relationships + + resources = find() + generator = partial(relationship_generator, driver, False) + + if threaded: + with ThreadPoolExecutor() as exc: + exc.map(generator, resources) + else: + print("Second pass for relationships") + for resource in tqdm(resources): + generator(resource) + print("") + + +def remove_attack_paths() -> None: + with get_driver().session() as session: + session.run("MATCH ()-[r]-() WHERE EXISTS (r.attack_path) DELETE r") + + +def setup_attack_paths() -> None: + print("Generating attack paths") + for relationship, query in tqdm(attack_paths.items()): + with get_driver().session() as session: + if isinstance(query, str): + query = [query] + for q in query: + cmd = q + f" MERGE (src)-[:{relationship} {{ attack_path: 1 }}]->(dest)" + + session.run(cmd) + print("") + + +def purge_neo4j() -> None: + with get_driver().session() as session: + session.run("MATCH (x)-[r]-(y) DELETE x, r, y") + session.run("MATCH (x) DELETE x") diff --git a/icekube/kube.py b/icekube/kube.py new file mode 100644 index 0000000..ec28be7 --- /dev/null +++ b/icekube/kube.py @@ -0,0 +1,201 @@ +import logging +from collections.abc import Iterator +from typing import Any, Dict, List, Optional, cast + +from icekube.models import APIResource, Resource +from kubernetes import client, config +from tqdm import tqdm + +logger = logging.getLogger(__name__) + +loaded_kube_config = False +api_resources_cache: Optional[List[APIResource]] = None +preferred_versions: Dict[str, str] = {} + + +def load_kube_config(): + global loaded_kube_config + + if not loaded_kube_config: + config.load_kube_config() + loaded_kube_config = True + + +def kube_version() -> str: + load_kube_config() + return cast(str, client.VersionApi().get_code().git_version) + + +def context_name() -> str: + load_kube_config() + return cast(str, config.list_kube_config_contexts()[1]["context"]["cluster"]) + + +def api_versions() -> List[str]: + load_kube_config() + versions = [] + + for version in client.CoreApi().get_api_versions().versions: + versions.append(f"{version}") + + for api in client.ApisApi().get_api_versions().groups: + preferred_versions[api.name] = api.preferred_version.version + for v in api.versions: + 
versions.append(f"{api.name}/{v.version}") + + return sorted(versions) + + +def api_resources() -> List[APIResource]: + global api_resources_cache + load_kube_config() + + if api_resources_cache is not None: + return api_resources_cache + + try: + versions = api_versions() + except Exception: + logger.error("Failed to access Kubernetes cluster") + api_resources_cache = [] + return api_resources_cache + + resources: List[APIResource] = [] + + for version in versions: + if "/" in version: + group, vers = version.split("/") + resp = client.CustomObjectsApi().list_cluster_custom_object( + group, + vers, + "", + ) + preferred = preferred_versions[group] == vers + else: + resp = client.CoreV1Api().get_api_resources() + preferred = True + resp = resp.to_dict() + for item in resp["resources"]: + # if "/" in item["name"]: + # continue + # if not any(x in item["verbs"] for x in ["get", "list"]): + # continue + + additional_verbs = { + "roles": ["bind", "escalate"], + "clusterroles": ["bind", "escalate"], + "serviceaccounts": ["impersonate"], + "users": ["impersonate"], + "groups": ["impersonate"], + } + + if item["name"] in additional_verbs.keys(): + item["verbs"] = list( + set(item["verbs"] + additional_verbs[item["name"]]), + ) + + resources.append( + APIResource( + name=item["name"], + namespaced=item["namespaced"], + group=version, + kind=item["kind"], + preferred=preferred, + verbs=item["verbs"], + ), + ) + + if not any(x.name == "users" for x in resources): + resources.append( + APIResource( + name="users", + namespaced=False, + group="", + kind="User", + preferred=True, + verbs=["impersonate"], + ), + ) + + if not any(x.name == "groups" for x in resources): + resources.append( + APIResource( + name="groups", + namespaced=False, + group="", + kind="Group", + preferred=True, + verbs=["impersonate"], + ), + ) + + if not any(x.name == "signers" for x in resources): + resources.append( + APIResource( + name="signers", + namespaced=False, + group="certificates.k8s.io/v1", + kind="Signer", + preferred=True, + verbs=["approve", "sign"], + ) + ) + + api_resources_cache = resources + return resources + + +def all_resources( + preferred_versions_only: bool = True, + ignore: Optional[List[str]] = None, +) -> Iterator[Resource]: + load_kube_config() + + if ignore is None: + ignore = [] + + all_namespaces: List[str] = [ + x.metadata.name for x in client.CoreV1Api().list_namespace().items + ] + + print("Enumerating Kubernetes resources") + for resource_kind in tqdm(api_resources()): + if "list" not in resource_kind.verbs: + continue + + if preferred_versions_only and not resource_kind.preferred: + continue + + if resource_kind.name in ignore: + continue + + logger.info(f"Fetching {resource_kind.name} resources") + resource_class = Resource.get_kind_class( + resource_kind.group, + resource_kind.kind, + ) + if resource_kind.namespaced: + for ns in all_namespaces: + yield from resource_class.list( + resource_kind.group, + resource_kind.kind, + resource_kind.name, + ns, + ) + else: + yield from resource_class.list( + resource_kind.group, + resource_kind.kind, + resource_kind.name, + ) + print("") + + +def metadata_download() -> Dict[str, Any]: + return { + "kube_version": kube_version(), + "context_name": context_name(), + "api_versions": api_versions(), + "preferred_versions": preferred_versions, + "api_resources": [x.dict() for x in api_resources()], + } diff --git a/icekube/log_config.py b/icekube/log_config.py new file mode 100644 index 0000000..3797b00 --- /dev/null +++ b/icekube/log_config.py @@ -0,0 
+1,23 @@ +import logging + +from tqdm.contrib.logging import _TqdmLoggingHandler, std_tqdm + + +def build_logger(debug_level=logging.DEBUG): + # create logger + logger = logging.getLogger("icekube") + logger.setLevel(debug_level) + # create console handler with a higher log level + ch = logging.StreamHandler() + ch.setLevel(debug_level) + # create formatter and add it to the handlers + formatter = logging.Formatter("%(asctime)s|%(name)s|%(levelname)s|%(message)s") + ch.setFormatter(formatter) + + # tell tqdm about the handler + tqdm_handler = _TqdmLoggingHandler(std_tqdm) + tqdm_handler.setFormatter(formatter) + tqdm_handler.stream = ch.stream + + # add the handlers to the logger + logger.addHandler(tqdm_handler) diff --git a/icekube/models/__init__.py b/icekube/models/__init__.py new file mode 100644 index 0000000..1cd3030 --- /dev/null +++ b/icekube/models/__init__.py @@ -0,0 +1,52 @@ +from typing import List, Type + +from icekube.models.api_resource import APIResource +from icekube.models.base import Resource +from icekube.models.cluster import Cluster +from icekube.models.clusterrole import ClusterRole +from icekube.models.clusterrolebinding import ClusterRoleBinding +from icekube.models.group import Group +from icekube.models.namespace import Namespace +from icekube.models.pod import Pod +from icekube.models.role import Role +from icekube.models.rolebinding import RoleBinding +from icekube.models.secret import Secret +from icekube.models.securitycontextconstraints import ( + SecurityContextConstraints, +) +from icekube.models.serviceaccount import ServiceAccount +from icekube.models.signer import Signer +from icekube.models.user import User + +enumerate_resource_kinds: List[Type[Resource]] = [ + ClusterRole, + ClusterRoleBinding, + Namespace, + Pod, + Role, + RoleBinding, + Secret, + SecurityContextConstraints, + ServiceAccount, +] + + +# plurals: Dict[str, Type[Resource]] = {x.plural: x for x in enumerate_resource_kinds} + + +__all__ = [ + "APIResource", + "Cluster", + "ClusterRole", + "ClusterRoleBinding", + "Group", + "Namespace", + "Pod", + "Role", + "RoleBinding", + "Secret", + "SecurityContextConstraints", + "ServiceAccount", + "Signer", + "User", +] diff --git a/icekube/models/_helpers.py b/icekube/models/_helpers.py new file mode 100644 index 0000000..26f63c2 --- /dev/null +++ b/icekube/models/_helpers.py @@ -0,0 +1,8 @@ +from typing import Any, Dict + +from kubernetes.client import ApiClient + + +def to_dict(resource) -> Dict[str, Any]: + resp: Dict[str, Any] = ApiClient().sanitize_for_serialization(resource) + return resp diff --git a/icekube/models/api_resource.py b/icekube/models/api_resource.py new file mode 100644 index 0000000..f262ed9 --- /dev/null +++ b/icekube/models/api_resource.py @@ -0,0 +1,12 @@ +from typing import List + +from pydantic import BaseModel + + +class APIResource(BaseModel): + name: str + namespaced: bool + group: str + kind: str + verbs: List[str] + preferred: bool = False diff --git a/icekube/models/base.py b/icekube/models/base.py new file mode 100644 index 0000000..387b9aa --- /dev/null +++ b/icekube/models/base.py @@ -0,0 +1,224 @@ +from __future__ import annotations + +import json +import logging +import traceback +from typing import Any, Dict, List, Optional, Tuple, Type, Union + +from icekube.utils import to_camel_case +from kubernetes import client +from pydantic import BaseModel, Field, root_validator + +logger = logging.getLogger(__name__) + + +class Resource(BaseModel): + apiVersion: str = Field(default=...) 
+ kind: str = Field(default=...) + name: str = Field(default=...) + plural: str = Field(default=...) + namespace: Optional[str] = Field(default=None) + raw: Optional[str] = Field(default=None) + + def __new__(cls, **kwargs): + kind_class = cls.get_kind_class( + kwargs.get("apiVersion", ""), + kwargs.get("kind", cls.__name__), + ) + return super(Resource, kind_class).__new__(kind_class) + + def __repr__(self) -> str: + if self.namespace: + return f"{self.kind}(namespace='{self.namespace}', name='{self.name}')" + else: + return f"{self.kind}(name='{self.name}')" + + def __str__(self) -> str: + return self.__repr__() + + def __eq__(self, other) -> bool: + comparison_points = ["apiVersion", "kind", "namespace", "name"] + + return all(getattr(self, x) == getattr(other, x) for x in comparison_points) + + @root_validator(pre=True) + def inject_missing_required_fields(cls, values): + if not all(x in values for x in ["apiVersion", "kind", "plural"]): + from icekube.kube import api_resources, preferred_versions + + test_kind = values.get("kind", cls.__name__) # type: ignore + + for x in api_resources(): + if x.kind == test_kind: + if "/" in x.group: + group, version = x.group.split("/") + if preferred_versions[group] != version: + continue + api_resource = x + break + else: + # Nothing found, setting them to blank + + def get_value(field): + if field in values: + return values[field] + + if cls.__fields__[field].default: + return cls.__fields__[field].default + + if field == "kind": + return test_kind + + return "N/A" + + values["apiVersion"] = get_value("apiVersion") + values["kind"] = get_value("kind") + values["plural"] = get_value("plural") + + return values + + if "apiVersion" not in values: + values["apiVersion"] = api_resource.group + + if "kind" not in values: + values["kind"] = api_resource.kind + + if "plural" not in values: + values["plural"] = api_resource.name + + return values + + @classmethod + def get_kind_class(cls, apiVersion: str, kind: str) -> Type[Resource]: + subclasses = {x.__name__: x for x in cls.__subclasses__()} + try: + return subclasses[kind] + except KeyError: + return cls + + @property + def api_group(self) -> str: + if "/" in self.apiVersion: + return self.apiVersion.split("/")[0] + else: + # When the base APIGroup is "" + return "" + + @property + def unique_identifiers(self) -> Dict[str, str]: + ident = { + "apiGroup": self.api_group, + "apiVersion": self.apiVersion, + "kind": self.kind, + "name": self.name, + } + if self.namespace: + ident["namespace"] = self.namespace + return ident + + @property + def db_labels(self) -> Dict[str, Any]: + return { + **self.unique_identifiers, + "plural": self.plural, + "raw": self.raw, + } + + @classmethod + def list( + cls: Type[Resource], + apiVersion: str, + kind: str, + name: str, + namespace: Optional[str] = None, + ) -> List[Resource]: + try: + group, version = apiVersion.split("/") + except ValueError: + # Core v1 API + group = None + version = apiVersion + resources: List[Resource] = [] + if group: + if namespace: + resp = client.CustomObjectsApi().list_namespaced_custom_object( + group, + version, + namespace, + name, + ) + else: + resp = client.CustomObjectsApi().list_cluster_custom_object( + group, + version, + name, + ) + else: + if namespace: + func = f"list_namespaced_{to_camel_case(kind)}" + resp = json.loads( + getattr(client.CoreV1Api(), func)( + namespace, _preload_content=False + ).data + ) + else: + func = f"list_{to_camel_case(kind)}" + resp = json.loads( + getattr(client.CoreV1Api(), 
func)(_preload_content=False).data + ) + + for item in resp.get("items", []): + item["apiVersion"] = apiVersion + item["kind"] = kind + try: + resources.append( + Resource( + apiVersion=apiVersion, + kind=kind, + name=item["metadata"]["name"], + namespace=item["metadata"]["namespace"] if namespace else None, + plural=name, + raw=json.dumps(item, default=str), + ), + ) + except Exception: + logger.error( + f"Error when processing {kind} - " + f"{item['metadata'].get('namespace', '')}:" + f"{item['metadata']['name']}", + ) + traceback.print_exc() + + return resources + + def relationships( + self, + initial: bool = True, + ) -> List[RELATIONSHIP]: + logger.debug( + f"Generating {'initial' if initial else 'second'} set of relationships", + ) + from icekube.neo4j import mock + + relationships: List[RELATIONSHIP] = [] + + if self.namespace is not None: + ns = mock(Resource, name=self.namespace, kind="Namespace") + relationships += [ + ( + self, + "WITHIN_NAMESPACE", + ns, + ), + ] + + return relationships + + +QUERY_RESOURCE = Tuple[str, Dict[str, str]] + +RELATIONSHIP = Tuple[ + Union[Resource, QUERY_RESOURCE], + Union[str, List[str]], + Union[Resource, QUERY_RESOURCE], +] diff --git a/icekube/models/cluster.py b/icekube/models/cluster.py new file mode 100644 index 0000000..97c3275 --- /dev/null +++ b/icekube/models/cluster.py @@ -0,0 +1,37 @@ +from typing import Dict, List + +from icekube.models.base import RELATIONSHIP, Resource + + +class Cluster(Resource): + version: str + kind: str = "Cluster" + apiVersion: str = "N/A" + plural: str = "clusters" + + def __repr__(self) -> str: + return f"Cluster(name='{self.name}', version='{self.version}')" + + @property + def unique_identifiers(self) -> Dict[str, str]: + return { + "name": self.name, + "kind": self.kind, + "apiVersion": self.apiVersion, + } + + @property + def db_labels(self) -> Dict[str, str]: + return {**self.unique_identifiers, "version": self.version} + + def relationships( + self, + initial: bool = True, + ) -> List[RELATIONSHIP]: + relationships = super().relationships() + + query = "MATCH (src) WHERE NOT src:Cluster " + + relationships += [((query, {}), "WITHIN_CLUSTER", self)] + + return relationships diff --git a/icekube/models/clusterrole.py b/icekube/models/clusterrole.py new file mode 100644 index 0000000..c3b53d7 --- /dev/null +++ b/icekube/models/clusterrole.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +import json +from typing import List + +from icekube.models.base import Resource +from icekube.models.policyrule import PolicyRule +from pydantic import root_validator +from pydantic.fields import Field + + +class ClusterRole(Resource): + rules: List[PolicyRule] = Field(default_factory=list) + + @root_validator(pre=True) + def inject_rules(cls, values): + data = json.loads(values.get("raw", "{}")) + + if "rules" not in values or values["rules"] is None: + values["rules"] = [] + + if "rules" not in data or data["rules"] is None: + data["rules"] = [] + + for rule in data.get("rules", []): + values["rules"].append(PolicyRule(**rule)) + + return values diff --git a/icekube/models/clusterrolebinding.py b/icekube/models/clusterrolebinding.py new file mode 100644 index 0000000..1dbc230 --- /dev/null +++ b/icekube/models/clusterrolebinding.py @@ -0,0 +1,97 @@ +from __future__ import annotations + +import json +from typing import Any, Dict, List, Optional, Union + +from icekube.models.base import RELATIONSHIP, Resource +from icekube.models.clusterrole import ClusterRole +from icekube.models.group import Group +from 
icekube.models.role import Role +from icekube.models.serviceaccount import ServiceAccount +from icekube.models.user import User +from icekube.neo4j import find_or_mock, get_cluster_object, mock +from pydantic import root_validator +from pydantic.fields import Field + + +def get_role( + role_ref: Dict[str, Any], + namespace: Optional[str] = None, +) -> Union[ClusterRole, Role]: + role_ref["kind"] = role_ref.get("kind", "ClusterRole") + if role_ref["kind"] == "ClusterRole": + return find_or_mock(ClusterRole, name=role_ref["name"]) + elif role_ref["kind"] == "Role": + return find_or_mock( + Role, + name=role_ref["name"], + namespace=role_ref.get("namespace", namespace), + ) + else: + raise Exception(f"Unknown RoleRef kind: {role_ref['kind']}") + + +def get_subjects( + subjects: List[Dict[str, Any]], + namespace: Optional[str] = None, +) -> List[Union[ServiceAccount, User, Group]]: + results: List[Union[ServiceAccount, User, Group]] = [] + + if subjects is None: + return results + + for subject in subjects: + if subject["kind"] in ["SystemUser", "User"]: + results.append(User(name=subject["name"])) + elif subject["kind"] in ["SystemGroup", "Group"]: + results.append(Group(name=subject["name"])) + elif subject["kind"] == "ServiceAccount": + results.append( + mock( + ServiceAccount, + name=subject["name"], + namespace=subject.get("namespace", namespace), + ), + ) + else: + raise Exception(f"Unknown Subject Kind: {subject['kind']}") + + return results + + +class ClusterRoleBinding(Resource): + role: Union[ClusterRole, Role] + subjects: List[Union[ServiceAccount, User, Group]] = Field(default_factory=list) + + @root_validator(pre=True) + def inject_role_and_subjects(cls, values): + data = json.loads(values.get("raw", "{}")) + + role_ref = data.get("roleRef") + if role_ref: + values["role"] = get_role(role_ref) + else: + values["role"] = ClusterRole(name="") + + values["subjects"] = get_subjects(data.get("subjects", [])) + + return values + + def relationships( + self, + initial: bool = True, + ) -> List[RELATIONSHIP]: + relationships = super().relationships() + relationships += [(self, "GRANTS_PERMISSION", self.role)] + relationships += [(subject, "BOUND_TO", self) for subject in self.subjects] + + if not initial: + for role_rule in self.role.rules: + if role_rule.contains_csr_approval: + relationships.append( + (self, "HAS_CSR_APPROVAL", get_cluster_object()), + ) + for relationship, resource in role_rule.affected_resource_query(): + relationships.append((self, relationship, resource)) + + return relationships diff --git a/icekube/models/group.py b/icekube/models/group.py new file mode 100644 index 0000000..7ecd1dd --- /dev/null +++ b/icekube/models/group.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from typing import Dict + +from icekube.models.base import Resource + + +class Group(Resource): + plural: str = "groups" + + @property + def unique_identifiers(self) -> Dict[str, str]: + return {**super().unique_identifiers, "plural": self.plural} diff --git a/icekube/models/namespace.py b/icekube/models/namespace.py new file mode 100644 index 0000000..5cc374c --- /dev/null +++ b/icekube/models/namespace.py @@ -0,0 +1,7 @@ +from __future__ import annotations + +from icekube.models.base import Resource + + +class Namespace(Resource): + ... 
diff --git a/icekube/models/node.py b/icekube/models/node.py new file mode 100644 index 0000000..5889620 --- /dev/null +++ b/icekube/models/node.py @@ -0,0 +1,7 @@ +from __future__ import annotations + +from icekube.models.base import Resource + + +class Node(Resource): + ... diff --git a/icekube/models/pod.py b/icekube/models/pod.py new file mode 100644 index 0000000..b01fcb1 --- /dev/null +++ b/icekube/models/pod.py @@ -0,0 +1,265 @@ +from __future__ import annotations + +import json +from itertools import product +from pathlib import Path +from typing import Any, Dict, List, Optional, cast + +from icekube.models.base import RELATIONSHIP, Resource +from icekube.models.node import Node +from icekube.models.secret import Secret +from icekube.models.serviceaccount import ServiceAccount +from icekube.neo4j import mock +from pydantic import root_validator + +CAPABILITIES = [ + "AUDIT_CONTROL", + "AUDIT_READ", + "AUDIT_WRITE", + "BLOCK_SUSPEND", + "BPF", + "CHECKPOINT_RESTORE", + "CHOWN", + "DAC_OVERRIDE", + "DAC_READ_SEARCH", + "FOWNER", + "FSETID", + "IPC_LOCK", + "IPC_OWNER", + "KILL", + "LEASE", + "LINUX_IMMUTABLE", + "MAC_ADMIN", + "MAC_OVERRIDE", + "MKNOD", + "NET_ADMIN", + "NET_BIND_SERVICE", + "NET_BROADCAST", + "NET_RAW", + "PERFMON", + "SETFCAP", + "SETGID", + "SETPCAP", + "SETUID", + "SYSLOG", + "SYS_ADMIN", + "SYS_BOOT", + "SYS_CHROOT", + "SYS_MODULE", + "SYS_NICE", + "SYS_PACCT", + "SYS_PTRACE", + "SYS_RAWIO", + "SYS_RESOURCE", + "SYS_TIME", + "SYS_TTY_CONFIG", + "WAKE_ALARM", +] + + +class Pod(Resource): + service_account: Optional[ServiceAccount] + node: Optional[Node] + containers: List[Dict[str, Any]] + capabilities: List[str] + host_path_volumes: List[str] + privileged: bool + hostPID: bool + hostNetwork: bool + + @root_validator(pre=True) + def inject_service_account(cls, values): + data = json.loads(values.get("raw", "{}")) + sa = data.get("spec", {}).get("serviceAccountName") + if sa: + values["service_account"] = mock( + ServiceAccount, + name=sa, + namespace=values.get("namespace"), + ) + else: + values["service_account"] = None + return values + + @root_validator(pre=True) + def inject_node(cls, values): + data = json.loads(values.get("raw", "{}")) + node = data.get("spec", {}).get("nodeName") + if node: + values["node"] = mock(Node, name=node) + else: + values["node"] = None + + return values + + @root_validator(pre=True) + def inject_containers(cls, values): + data = json.loads(values.get("raw", "{}")) + + values["containers"] = data.get("spec", {}).get("containers", []) + + return values + + @root_validator(pre=True) + def inject_capabilities(cls, values): + data = json.loads(values.get("raw", "{}")) + + containers = data.get("spec", {}).get("containers", []) + capabilities = set() + + for container in containers: + security_context = container.get("securityContext") or {} + caps = security_context.get("capabilities") or {} + addl = caps.get("add") or [] + addl = [x.upper() for x in addl] + add = set(addl) + + if "ALL" in add: + add.remove("ALL") + add.update(set(CAPABILITIES)) + + capabilities.update(add) + + values["capabilities"] = list(capabilities) + + return values + + @root_validator(pre=True) + def inject_privileged(cls, values): + data = json.loads(values.get("raw", "{}")) + + containers = data.get("spec", {}).get("containers", []) + privileged = False + for container in containers: + context = container.get("securityContext") or {} + + if context.get("privileged", False): + privileged = True + + values["privileged"] = privileged + + return values + + 
+ # Record hostPath volume paths; dangerous_host_path below uses these to flag risky mounts + @root_validator(pre=True) + def inject_host_path_volumes(cls, values): + data = json.loads(values.get("raw", "{}")) + volumes = data.get("spec", {}).get("volumes") or [] + host_volumes = [x for x in volumes if "hostPath" in x and x["hostPath"]] + + values["host_path_volumes"] = [x["hostPath"]["path"] for x in host_volumes] + + return values + + @root_validator(pre=True) + def inject_host_pid(cls, values): + data = json.loads(values.get("raw", "{}")) + + values["hostPID"] = data.get("spec", {}).get("hostPID") or False + + return values + + @root_validator(pre=True) + def inject_host_network(cls, values): + data = json.loads(values.get("raw", "{}")) + + values["hostNetwork"] = data.get("spec", {}).get("hostNetwork") or False + + return values + + @property + def dangerous_host_path(self) -> bool: + # Dangerous paths to check for + # Not all of these give direct node compromise, but will grant enough + # permissions to maybe steal certificates to help with API server + # as the node, or the like + # TODO: account for wildcards for /home/*/.ssh/, etc + dangerous_paths = [ + "/etc/kubernetes/admin.conf", + "/etc/kubernetes/kubeconfig", + "/etc/shadow", + "/proc/sys/kernel", + "/root/.kube/config", + "/root/.ssh/authorized_keys", + "/run/containerd/containerd.sock", + "/run/crio/crio.sock", + "/run/cri-dockerd.sock", + "/run/docker.sock", + "/run/dockershim.sock", + "/var/lib/kubelet/pods/", + "/var/lib/kubernetes/", + "/var/lib/minikube/certs/apiserver.key", + "/var/log", + "/var/run/containerd/containerd.sock", + "/var/run/crio/crio.sock", + "/var/run/cri-dockerd.sock", + "/var/run/docker.sock", + "/var/run/dockershim.sock", + ] + # A mount is dangerous if the mounted volume contains (is a parent of) a dangerous path + for volume, test_path in product(self.host_path_volumes, dangerous_paths): + try: + Path(test_path).relative_to(Path(volume)) + return True + except ValueError: + pass + return False + + @property + def mounted_secrets(self) -> List[str]: + if self.raw: + data = json.loads(self.raw) + else: + return [] + + secrets = [] + + volumes = data.get("spec", {}).get("volumes") or [] + + for volume in volumes: + if volume.get("secret"): + secrets.append(volume["secret"]["secretName"]) + + for container in data.get("spec", {}).get("containers") or []: + if not container.get("env"): + continue + for env in container["env"]: + try: + secrets.append(env["valueFrom"]["secretKeyRef"]["name"]) + except (KeyError, TypeError): + pass + + return secrets + + @property + def db_labels(self) -> Dict[str, Any]: + return { + **super().db_labels, + "capabilities": self.capabilities, + "host_path_volumes": self.host_path_volumes, + "dangerous_host_path": self.dangerous_host_path, + "privileged": self.privileged, + "hostPID": self.hostPID, + "hostNetwork": self.hostNetwork, + } + + def relationships( + self, + initial: bool = True, + ) -> List[RELATIONSHIP]: + relationships = super().relationships() + # TODO: Also check if mounted + if self.service_account: + relationships += [(self, "USES_ACCOUNT", self.service_account)] + if self.node: + relationships += [(self.node, "HOSTS_POD", self)] + for secret in self.mounted_secrets: + relationships += [ + ( + self, + "MOUNTS_SECRET", + mock(Secret, namespace=cast(str, self.namespace), name=secret), + ), + ] + + return relationships diff --git a/icekube/models/policyrule.py b/icekube/models/policyrule.py new file mode 100644 index 0000000..c642a0a --- /dev/null +++ b/icekube/models/policyrule.py @@ -0,0 +1,116 @@ +import itertools +from fnmatch import filter as fnfilter +from 
fnmatch import fnmatch +from typing import Dict, Iterator, List, Optional, Tuple, Union + +from pydantic import BaseModel +from pydantic.fields import Field + + +def generate_query( + filters: Dict[str, Union[str, List[str]]] +) -> Tuple[str, Dict[str, str]]: + query = "MATCH ({prefix}) WHERE" + final_filters = {} + query_parts = [] + for key, value in filters.items(): + if isinstance(value, list): + part = " OR ".join( + f"{{prefix}}.{key} =~ ${{prefix}}_{key}_{idx}" + for idx in range(len(value)) + ) + query_parts.append(f" ({part}) ") + for idx, v in enumerate(value): + final_filters[f"{key}_{idx}"] = v + else: + query_parts.append(f" {{prefix}}.{key} =~ ${{prefix}}_{key} ") + final_filters[key] = value + query += "AND".join(query_parts) + return query, final_filters + + +def remove_version(group): + if "/" in group: + return group.split("/")[0] + else: + return "" + + +class PolicyRule(BaseModel): + apiGroups: List[str] = Field(default_factory=list) + nonResourceURLs: List[str] = Field(default_factory=list) + resourceNames: List[str] = Field(default_factory=list) + resources: List[str] = Field(default_factory=list) + verbs: List[str] = Field(default_factory=list) + + @property + def contains_csr_approval(self) -> bool: + resource = any( + fnmatch("certificatesigningrequests/approval", x) for x in self.resources + ) + verb = any(fnmatch("update", x) for x in self.verbs) + + return resource and verb + + def api_resources(self): + from icekube.kube import api_resources + + for api_group, resource in itertools.product(self.apiGroups, self.resources): + for res in api_resources(): + if fnmatch(remove_version(res.group), api_group) and fnmatch( + res.name, + resource, + ): + yield res + + def affected_resource_query( + self, + namespace: Optional[str] = None, + ) -> Iterator[Tuple[Union[str, List[str]], Tuple[str, Dict[str, str]]]]: + for api_resource in self.api_resources(): + resource = api_resource.name + sub_resource = None + if "/" in resource: + resource, sub_resource = resource.split("/") + # Normalise the sub-resource name for use in relationship labels + sub_resource = sub_resource.replace("-", "_") + + find_filter = {"apiVersion": api_resource.group, "plural": resource} + if namespace: + find_filter["namespace"] = namespace + + valid_verbs = set() + for verb in self.verbs: + valid_verbs.update(fnfilter(api_resource.verbs, verb.lower())) + + if "create" in valid_verbs and sub_resource is None: + if namespace: + query_filter: Dict[str, Union[str, List[str]]] = { + "kind": "Namespace", + "name": namespace, + } + else: + query_filter = {"kind": "Cluster"} + yield ( + f"GRANTS_{resource}_CREATE".upper().replace("-", "_"), + generate_query(query_filter), + ) + query_filter = {"kind": "Namespace"} + yield ( + f"GRANTS_{resource}_CREATE".upper().replace("-", "_"), + generate_query(query_filter), + ) + valid_verbs.remove("create") + + if not valid_verbs: + continue + + if sub_resource is None: + tags = [f"GRANTS_{verb}".upper() for verb in valid_verbs] + else: + tags = [f"GRANTS_{sub_resource}_{verb}".upper() for verb in valid_verbs] + + if not self.resourceNames: + yield (tags, generate_query(find_filter)) + else: + names = [name.replace("*", ".*") for name in self.resourceNames] + yield (tags, generate_query({**find_filter, "name": names})) diff --git a/icekube/models/role.py b/icekube/models/role.py new file mode 100644 index 0000000..df832de --- /dev/null +++ b/icekube/models/role.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +import json +from typing import List + +from icekube.models.base import Resource +from icekube.models.policyrule import PolicyRule 
+from pydantic import root_validator +from pydantic.fields import Field + + +class Role(Resource): + rules: List[PolicyRule] = Field(default_factory=list) + + @root_validator(pre=True) + def inject_role(cls, values): + data = json.loads(values.get("raw", "{}")) + + if "rules" not in values: + values["rules"] = [] + + for rule in data.get("rules", []) or []: + values["rules"].append(PolicyRule(**rule)) + + return values diff --git a/icekube/models/rolebinding.py b/icekube/models/rolebinding.py new file mode 100644 index 0000000..844fece --- /dev/null +++ b/icekube/models/rolebinding.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import json +from typing import List, Union + +from icekube.models.base import RELATIONSHIP, Resource +from icekube.models.clusterrole import ClusterRole +from icekube.models.clusterrolebinding import get_role, get_subjects +from icekube.models.group import Group +from icekube.models.role import Role +from icekube.models.serviceaccount import ServiceAccount +from icekube.models.user import User +from pydantic import root_validator +from pydantic.fields import Field + + +class RoleBinding(Resource): + role: Union[ClusterRole, Role] + subjects: List[Union[ServiceAccount, User, Group]] = Field(default_factory=list) + + @root_validator(pre=True) + def inject_role_and_subjects(cls, values): + data = json.loads(values.get("raw", "{}")) + + ns = values.get("namespace") + + role_ref = data.get("roleRef") + if role_ref: + values["role"] = get_role(role_ref, ns) + else: + values["role"] = ClusterRole(name="") + + values["subjects"] = get_subjects(data.get("subjects", []), ns) + + return values + + def relationships( + self, + initial: bool = True, + ) -> List[RELATIONSHIP]: + relationships = super().relationships() + relationships += [(self, "GRANTS_PERMISSION", self.role)] + relationships += [(subject, "BOUND_TO", self) for subject in self.subjects] + + if not initial: + for role_rule in self.role.rules: + for relationship, resource in role_rule.affected_resource_query( + self.namespace, + ): + relationships.append((self, relationship, resource)) + + return relationships diff --git a/icekube/models/secret.py b/icekube/models/secret.py new file mode 100644 index 0000000..47975e6 --- /dev/null +++ b/icekube/models/secret.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +import json +from typing import Any, Dict, List, cast + +from icekube.models.base import RELATIONSHIP, Resource +from icekube.neo4j import mock +from pydantic import root_validator + + +class Secret(Resource): + secret_type: str + annotations: Dict[str, Any] + + @root_validator(pre=True) + def remove_secret_data(cls, values): + data = json.loads(values.get("raw", "{}")) + if "data" in data: + del data["data"] + + values["raw"] = json.dumps(data) + + return values + + @root_validator(pre=True) + def extract_type(cls, values): + data = json.loads(values.get("raw", "{}")) + values["secret_type"] = data.get("type", "") + + return values + + @root_validator(pre=True) + def extract_annotations(cls, values): + data = json.loads(values.get("raw", "{}")) + values["annotations"] = data.get("metadata", {}).get("annotations") or {} + + return values + + def relationships(self, initial: bool = True) -> List[RELATIONSHIP]: + relationships = super().relationships() + + if self.secret_type == "kubernetes.io/service-account-token": + from icekube.models.serviceaccount import ServiceAccount + + sa = self.annotations.get("kubernetes.io/service-account.name") + if sa: + account = mock( + ServiceAccount, + 
name=sa, + namespace=cast(str, self.namespace), + ) + relationships.append( + ( + self, + "AUTHENTICATION_TOKEN_FOR", + account, + ), + ) + + return relationships diff --git a/icekube/models/securitycontextconstraints.py b/icekube/models/securitycontextconstraints.py new file mode 100644 index 0000000..0e063a5 --- /dev/null +++ b/icekube/models/securitycontextconstraints.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +import json +from typing import List, Union + +from icekube.models.base import RELATIONSHIP, Resource +from icekube.models.group import Group +from icekube.models.serviceaccount import ServiceAccount +from icekube.models.user import User +from icekube.neo4j import mock +from pydantic import root_validator +from pydantic.fields import Field + + +class SecurityContextConstraints(Resource): + plural: str = "securitycontextconstraints" + users: List[Union[User, ServiceAccount]] = Field(default_factory=list) + groups: List[Group] + + @root_validator(pre=True) + def inject_users_and_groups(cls, values): + data = json.loads(values.get("raw", "{}")) + + users = data.get("users", []) + values["users"] = [] + for user in users: + if user.startswith("system:serviceaccount:"): + ns, name = user.split(":")[2:] + values["users"].append( + mock( + ServiceAccount, + name=name, + namespace=ns, + ), + ) + else: + values["users"].append(mock(User, name=user)) + + groups = data.get("groups", []) + values["groups"] = [] + for group in groups: + values["groups"].append(mock(Group, name=group)) + + return values + + def relationships(self, initial: bool = True) -> List[RELATIONSHIP]: + relationships = super().relationships() + + relationships += [(x, "GRANTS_USE", self) for x in self.users + self.groups] + + return relationships diff --git a/icekube/models/serviceaccount.py b/icekube/models/serviceaccount.py new file mode 100644 index 0000000..7e04f63 --- /dev/null +++ b/icekube/models/serviceaccount.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +import json +from typing import List + +from icekube.models.base import RELATIONSHIP, Resource +from icekube.models.secret import Secret +from icekube.neo4j import mock +from pydantic import root_validator +from pydantic.fields import Field + + +class ServiceAccount(Resource): + secrets: List[Secret] = Field(default_factory=list) + + @root_validator(pre=True) + def inject_secrets(cls, values): + data = json.loads(values.get("raw", "{}")) + + if "secrets" not in values: + values["secrets"] = [] + + if "secrets" in data and data["secrets"] is None: + data["secrets"] = [] + + for secret in data.get("secrets", []): + values["secrets"].append( + mock( + Secret, + name=secret.get("name", ""), + namespace=data.get("metadata", {}).get("namespace", ""), + ), + ) + + return values + + def relationships( + self, + initial: bool = True, + ) -> List[RELATIONSHIP]: + relationships = super().relationships() + relationships += [(x, "AUTHENTICATION_TOKEN_FOR", self) for x in self.secrets] + return relationships diff --git a/icekube/models/signer.py b/icekube/models/signer.py new file mode 100644 index 0000000..e155d2d --- /dev/null +++ b/icekube/models/signer.py @@ -0,0 +1,19 @@ +from typing import Dict + +from icekube.models.base import Resource + + +class Signer(Resource): + apiVersion: str = "certificates.k8s.io/v1" + kind: str = "Signer" + plural: str = "signers" + + def __repr__(self) -> str: + return f"Signer(name='{self.name}')" + + @property + def db_labels(self) -> Dict[str, str]: + return { + **self.unique_identifiers, + "plural": self.plural, 
+ } diff --git a/icekube/models/user.py b/icekube/models/user.py new file mode 100644 index 0000000..b497c9e --- /dev/null +++ b/icekube/models/user.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from typing import Dict + +from icekube.models.base import Resource + + +class User(Resource): + plural: str = "users" + + @property + def unique_identifiers(self) -> Dict[str, str]: + return {**super().unique_identifiers, "plural": self.plural} diff --git a/icekube/neo4j.py b/icekube/neo4j.py new file mode 100644 index 0000000..868dbe4 --- /dev/null +++ b/icekube/neo4j.py @@ -0,0 +1,154 @@ +from __future__ import annotations + +import logging +from typing import Any, Dict, Generator, List, Optional, Tuple, Type, TypeVar + +from icekube.config import config +from icekube.models import Cluster, Resource +from neo4j import BoltDriver, GraphDatabase +from neo4j.io import ServiceUnavailable + +T = TypeVar("T") + +logger = logging.getLogger(__name__) + + +driver: Optional[BoltDriver] = None + + +def get_driver() -> BoltDriver: + global driver + + if not driver: + driver = init_connection() + + return driver + + +def init_connection( + uri: str = "bolt://localhost:7687", + auth: Tuple[str, str] = ("neo4j", "neo4j"), + encrypted: bool = False, +) -> BoltDriver: + neo4j_config = config.get("neo4j", {}) + uri = neo4j_config.get("url", uri) + auth = ( + neo4j_config.get("username", auth[0]), + neo4j_config.get("password", auth[1]), + ) + encrypted = neo4j_config.get("encrypted", encrypted) + + return GraphDatabase.driver(uri, auth=auth, encrypted=encrypted) + + +def create_index(kind: str, namespace: bool) -> None: + cmd = f"CREATE INDEX {kind.lower()} FOR (n:{kind}) ON (n.name" + if namespace: + cmd += ", n.namespace" + cmd += ")" + + driver = get_driver() + + with driver.session() as session: + session.run(cmd) + + +def get( + resource: Resource, + identifier: str = "", + prefix: str = "", +) -> Tuple[str, Dict[str, str]]: + kwargs: Dict[str, str] = {} + labels: List[str] = [] + identifier = identifier or prefix + + if prefix: + prefix += "_" + + for key, value in resource.unique_identifiers.items(): + labels.append(f"{key}: ${prefix}{key}") + kwargs[f"{prefix}{key}"] = value + + cmd = f"MERGE ({identifier}:{resource.kind} {{ {', '.join(labels)} }}) " + + return cmd, kwargs + + +def create(resource: Resource, prefix: str = "") -> Tuple[str, Dict[str, Any]]: + cmd, kwargs = get(resource, "x", prefix) + + labels: List[str] = [] + + if prefix: + prefix += "_" + + for key, value in resource.db_labels.items(): + labels.append(f"{key}: ${prefix}{key}") + kwargs[f"{prefix}{key}"] = value + + cmd += f"SET x += {{ {', '.join(labels)} }} " + + return cmd, kwargs + + +def find( + resource: Optional[Type[Resource]] = None, + raw: bool = False, + **kwargs: str, +) -> Generator[Resource, None, None]: + labels = [f"{key}: ${key}" for key in kwargs.keys()] + if resource is None or resource is Resource: + cmd = f"MATCH (x {{ {', '.join(labels)} }}) " + else: + cmd = f"MATCH (x:{resource.__name__} {{ {', '.join(labels)} }}) " + + if raw: + cmd += "WHERE EXISTS (x.raw) " + + cmd += "RETURN x" + + driver = get_driver() + + with driver.session() as session: + logger.debug(f"Starting neo4j query: {cmd}, {kwargs}") + results = session.run(cmd, kwargs) + + for result in results: + result = result[0] + props = result._properties + logger.debug( + f"Loading resource: {props['kind']} " + f"{props.get('namespace', '')} {props['name']}", + ) + + if resource is None: + res = Resource(**props) + else: + res = 
diff --git a/icekube/utils.py b/icekube/utils.py
new file mode 100644
index 0000000..4f691be
--- /dev/null
+++ b/icekube/utils.py
@@ -0,0 +1,9 @@
+import re
+
+
+def to_camel_case(string: str) -> str:
+    """Convert a CamelCase or kebab-case string to snake_case."""
+    string = re.sub(r"([A-Z]+)([A-Z][a-z])", r"\1_\2", string)
+    string = re.sub(r"([a-z\d])([A-Z])", r"\1_\2", string)
+    string = string.replace("-", "_")
+    return string.lower()
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000..1c25067
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,24 @@
+[mypy]
+mypy_path = stubs
+
+warn_unused_ignores = True
+warn_return_any = True
+
+strict_optional = True
+no_implicit_optional = True
+
+#disallow_any_unimported = True
+#disallow_any_expr = True
+#disallow_any_decorated = True
+#disallow_any_explicit = True
+disallow_subclassing_any = True
+disallow_any_generics = True
+
+# disallow_untyped_calls = True
+# disallow_untyped_defs = True
+# disallow_incomplete_defs = True
+disallow_untyped_decorators = True
+
+check_untyped_defs = True
+
+ignore_missing_imports = True
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..196306c
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1589 @@
+# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand.
+
+[[package]]
+name = "annotated-types"
+version = "0.6.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"},
+    {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
+
+[[package]]
+name = "appnope"
+version = "0.1.3"
+description = "Disable App Nap on macOS >= 10.9"
+optional = false
+python-versions = "*"
+files = [
+    {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"},
+    {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"},
+]
+
+[[package]]
+name = "asttokens"
+version = "2.4.1"
+description = "Annotate AST trees with source code positions"
+optional = false
+python-versions = "*"
+files = [
+    {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"},
+    {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"},
+]
+
+[package.dependencies]
+six = ">=1.12.0"
+
+[package.extras]
+astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"]
+test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
+
+[[package]]
+name = "attrs"
+version = "23.1.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "attrs-23.1.0-py3-none-any.whl", hash =
"sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +optional = false +python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + +[[package]] +name = "black" +version = "23.10.1" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, + {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, + {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, + {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, + {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, + {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, + {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, + {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, + {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, + {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, + {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.3.2" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = 
"coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = 
"sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "entrypoints" +version = "0.3" +description = "Discover and load entry points from installed packages." +optional = false +python-versions = ">=2.7" +files = [ + {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, + {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "fancycompleter" +version = "0.9.1" +description = "colorful TAB completion for Python prompt" +optional = false +python-versions = "*" +files = [ + {file = "fancycompleter-0.9.1-py3-none-any.whl", hash = "sha256:dd076bca7d9d524cc7f25ec8f35ef95388ffef9ef46def4d3d25e9b044ad7080"}, + {file = "fancycompleter-0.9.1.tar.gz", hash = "sha256:09e0feb8ae242abdfd7ef2ba55069a46f011814a80fe5476be48f51b00247272"}, +] + +[package.dependencies] +pyreadline = {version = "*", markers = "platform_system == \"Windows\""} +pyrepl = ">=0.8.2" + +[[package]] +name = "flake8" +version = "3.7.9" +description = "the modular source code checker: pep8, pyflakes and co" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "flake8-3.7.9-py2.py3-none-any.whl", hash = "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca"}, + {file = "flake8-3.7.9.tar.gz", hash = "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb"}, +] + +[package.dependencies] +entrypoints = ">=0.3.0,<0.4.0" +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.5.0,<2.6.0" +pyflakes = ">=2.1.0,<2.2.0" + +[[package]] +name = "flake8-comprehensions" +version = "3.14.0" +description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "flake8_comprehensions-3.14.0-py3-none-any.whl", hash = "sha256:7b9d07d94aa88e62099a6d1931ddf16c344d4157deedf90fe0d8ee2846f30e97"}, + {file = "flake8_comprehensions-3.14.0.tar.gz", hash = "sha256:81768c61bfc064e1a06222df08a2580d97de10cb388694becaf987c331c6c0cf"}, +] + +[package.dependencies] +flake8 = ">=3.0,<3.2.0 || >3.2.0" + +[[package]] +name = "flake8-debugger" +version = "4.1.2" +description = "ipdb/pdb statement checker plugin for flake8" +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"}, + {file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"}, +] + +[package.dependencies] +flake8 = ">=3.0" +pycodestyle = "*" + +[[package]] +name = "flake8-docstrings" +version = "1.7.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, + {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, +] + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "flake8-isort" +version = "5.0.3" +description = "flake8 plugin that integrates isort ." +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-isort-5.0.3.tar.gz", hash = "sha256:0951398c343c67f4933407adbbfb495d4df7c038650c5d05753a006efcfeb390"}, + {file = "flake8_isort-5.0.3-py3-none-any.whl", hash = "sha256:8c4ab431d87780d0c8336e9614e50ef11201bc848ef64ca017532dec39d4bf49"}, +] + +[package.dependencies] +flake8 = "*" +isort = ">=4.3.5,<6" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "flake8-mutable" +version = "1.2.0" +description = "mutable defaults flake8 extension" +optional = false +python-versions = "*" +files = [ + {file = "flake8-mutable-1.2.0.tar.gz", hash = "sha256:ee9b77111b867d845177bbc289d87d541445ffcc6029a0c5c65865b42b18c6a6"}, + {file = "flake8_mutable-1.2.0-py2-none-any.whl", hash = "sha256:38fd9dadcbcda6550a916197bc40ed76908119dabb37fbcca30873666c31d2d5"}, +] + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "flake8-todo" +version = "0.7" +description = "TODO notes checker, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "flake8-todo-0.7.tar.gz", hash = "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915"}, +] + +[package.dependencies] +pycodestyle = ">=2.0.0,<3.0.0" + +[[package]] +name = "google-auth" +version = "2.23.4" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.23.4.tar.gz", hash = "sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3"}, + {file = "google_auth-2.23.4-py2.py3-none-any.whl", hash = "sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] 
+requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipython" +version = "8.12.3" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipython-8.12.3-py3-none-any.whl", hash = "sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, + {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "isort" +version = "4.3.21" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "isort-4.3.21-py2.py3-none-any.whl", hash = "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"}, + {file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"}, +] + +[package.dependencies] +toml = {version = "*", optional = true, markers = "extra == \"pyproject\""} + +[package.extras] +pipfile = ["pipreqs", "requirementslib"] +pyproject = ["toml"] +requirements = ["pip-api", "pipreqs"] +xdg-home = ["appdirs (>=1.4.0)"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "kubernetes" +version = "28.1.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +files = [ + {file = "kubernetes-28.1.0-py2.py3-none-any.whl", hash = "sha256:10f56f8160dcb73647f15fafda268e7f60cf7dbc9f8e46d52fcd46d3beb0c18d"}, + {file = "kubernetes-28.1.0.tar.gz", hash = "sha256:1468069a573430fb1cb5ad22876868f57977930f80a6749405da31cd6086a7e9"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2,<2.0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] + +[[package]] +name = 
"mypy" +version = "1.6.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "neo4j" +version = "4.0.dev0" +description = "Neo4j Bolt driver for Python" +optional = false +python-versions = "*" +files = [] +develop = false + +[package.dependencies] +pytz = "*" + +[package.source] +type = "git" +url = "https://github.com/neo4j/neo4j-python-driver.git" +reference = "4.0" +resolved_reference = "901337d7f392ad8a6765da6848e90fb9f44fabe8" + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "pdbpp" +version = "0.10.3" +description = "pdb++, a drop-in replacement for pdb" +optional = false +python-versions = "*" +files = [ + {file = "pdbpp-0.10.3-py2.py3-none-any.whl", hash = "sha256:79580568e33eb3d6f6b462b1187f53e10cd8e4538f7d31495c9181e2cf9665d1"}, + {file = "pdbpp-0.10.3.tar.gz", hash = "sha256:d9e43f4fda388eeb365f2887f4e7b66ac09dce9b6236b76f63616530e2f669f5"}, +] + +[package.dependencies] +fancycompleter = ">=0.8" +pygments = "*" +wmctrl = "*" + +[package.extras] +funcsigs = ["funcsigs"] +testing = ["funcsigs", "pytest"] + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + +[[package]] +name = "platformdirs" +version = "3.11.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.39" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pyasn1" +version = "0.5.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, + {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pycodestyle" +version = "2.5.0" 
+description = "Python style guide checker" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycodestyle-2.5.0-py2.py3-none-any.whl", hash = "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56"}, + {file = "pycodestyle-2.5.0.tar.gz", hash = "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"}, +] + +[[package]] +name = "pydantic" +version = "2.4.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, + {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.10.1" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.10.1" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, + {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, + {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, + {file = 
"pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, + {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, + {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, + {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, + {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, + {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, + {file = 
"pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"}, + {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"}, + {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, + 
{file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, + {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, + {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, + {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, + {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, + {file = 
"pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, + {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydocstyle" +version = "6.3.0" +description = "Python docstring style checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, +] + +[package.dependencies] +snowballstemmer = ">=2.2.0" + +[package.extras] +toml = ["tomli (>=1.2.3)"] + +[[package]] +name = "pyflakes" +version = "2.1.1" +description = "passive checker of Python programs" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [] +develop = false + +[package.source] +type = "git" +url = "https://github.com/pycqa/pyflakes" +reference = "c688d2b02ac6e3416775b88d6411ee6a3e8a51ec" +resolved_reference = "c688d2b02ac6e3416775b88d6411ee6a3e8a51ec" + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyreadline" +version = "2.1" +description = "A python implmementation of GNU readline." 
+optional = false +python-versions = "*" +files = [ + {file = "pyreadline-2.1.zip", hash = "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1"}, +] + +[[package]] +name = "pyrepl" +version = "0.8.5.dev53+gca192a8" +description = "A library for building flexible command line interfaces" +optional = false +python-versions = "*" +files = [] +develop = false + +[package.source] +type = "git" +url = "https://github.com/pypy/pyrepl" +reference = "HEAD" +resolved_reference = "ca192a80b76700118b9bfd261a3d098b92ccfc31" + +[[package]] +name = "pytest" +version = "7.4.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytoolconfig" +version = "1.2.6" +description = "Python tool configuration" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytoolconfig-1.2.6-py3-none-any.whl", hash = "sha256:e8b2e538f11dbabc4617884d45401e0105e2d7db920cb8ae6baa94d66126a8e3"}, + {file = "pytoolconfig-1.2.6.tar.gz", hash = "sha256:f2d00ea4f8cbdffd3006780ba51016618c835b338f634e3f7f8b2715b1710889"}, +] + +[package.dependencies] +packaging = ">=22.0" +platformdirs = {version = ">=1.4.4", optional = true, markers = "extra == \"global\""} +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["sphinx (>=4.5.0)", "tabulate (>=0.8.9)"] +gendocs = ["pytoolconfig[doc]", "sphinx (>=4.5.0)", "sphinx-autodoc-typehints (>=1.18.1)", "sphinx-rtd-theme (>=1.0.0)"] +global = ["platformdirs (>=1.4.4)"] +validation = ["pydantic (>=1.7.4)"] + +[[package]] +name = "pytz" +version = "2023.3.post1" +description = "World timezone definitions, modern and 
historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rope" +version = "1.10.0" +description = "a python refactoring library..." +optional = false +python-versions = ">=3.8" +files = [ + {file = "rope-1.10.0-py3-none-any.whl", hash = "sha256:4e9f06d7296708a8a6518590c49352c99394053d91fde5b1d74c9ffc85c76d5b"}, + {file = "rope-1.10.0.tar.gz", hash = "sha256:14bfcc7dfea69fa32c53db0667e86e321ef059de555c7f8101a0cb91c8d85a55"}, +] + +[package.dependencies] +pytoolconfig = {version = ">=1.2.2", extras = ["global"]} + +[package.extras] +dev = ["build (>=0.7.0)", "pre-commit (>=2.20.0)", "pytest (>=7.0.1)", "pytest-timeout (>=2.1.0)"] +doc = ["pytoolconfig[doc]", "sphinx (>=4.5.0)", "sphinx-autodoc-typehints (>=1.18.1)", "sphinx-rtd-theme (>=1.0.0)"] +release = ["pip-tools (>=6.12.1)", "toml (>=0.10.2)", "twine (>=4.0.2)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", 
"pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.13.0" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"traitlets-5.13.0-py3-none-any.whl", hash = "sha256:baf991e61542da48fe8aef8b779a9ea0aa38d8a54166ee250d5af5ecf4486619"}, + {file = "traitlets-5.13.0.tar.gz", hash = "sha256:9b232b9430c8f57288c1024b34a8f0251ddcc47268927367a0dd3eeaca40deb5"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.6.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "typer" +version = "0.9.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.6" +files = [ + {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, + {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, +] + +[package.dependencies] +click = ">=7.1.1,<9.0.0" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.12" +description = "Typing stubs for PyYAML" +optional = false +python-versions = "*" +files = [ + {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, + {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, +] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.9" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.9-py2.py3-none-any.whl", hash = "sha256:9a929bd8380f6cd9571a968a9c8f4353ca58d7cd812a4822bba831f8d685b223"}, + {file = "wcwidth-0.2.9.tar.gz", hash = "sha256:a675d1a4a2d24ef67096a04b85b02deeecd8e226f57b5e3a72dbb9ed99d27da8"}, +] + +[[package]] +name = "websocket-client" +version = "1.6.4" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, + {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "wmctrl" +version = "0.5" +description = "A tool to programmatically control windows inside X" +optional = false +python-versions = ">=2.7" +files = [ + {file = "wmctrl-0.5-py2.py3-none-any.whl", hash = "sha256:ae695c1863a314c899e7cf113f07c0da02a394b968c4772e1936219d9234ddd7"}, + {file = "wmctrl-0.5.tar.gz", hash = "sha256:7839a36b6fe9e2d6fd22304e5dc372dbced2116ba41283ea938b2da57f53e962"}, +] + +[package.dependencies] +attrs = "*" + +[package.extras] +test = ["pytest"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8" +content-hash = "83f23fd0b974c36153aecbe01bd873be6ebc9f415366493640fd7c96778080e0" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0a54cda --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,42 @@ +[tool.poetry] +name = "icekube" +version = "1.0.0" +description = "" +authors = ["Mohit Gupta