diff --git a/.coveragerc b/.coveragerc
deleted file mode 100644
index a044130..0000000
--- a/.coveragerc
+++ /dev/null
@@ -1,2 +0,0 @@
-[run]
-source = pyro_risks
\ No newline at end of file
diff --git a/.dvc/.gitignore b/.dvc/.gitignore
deleted file mode 100644
index 528f30c..0000000
--- a/.dvc/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-/config.local
-/tmp
-/cache
diff --git a/.dvc/config b/.dvc/config
deleted file mode 100644
index 432e549..0000000
--- a/.dvc/config
+++ /dev/null
@@ -1,6 +0,0 @@
-['remote "artifacts-registry"']
-    url = gdrive://1fyD6xyuWWhyjNPoCiCazKshp0yJ8xKs9
-    gdrive_use_service_account = true
-    gdrive_service_account_json_file_path = tmp/credentials.json
-[core]
-    remote = artifacts-registry
\ No newline at end of file
diff --git a/.dvcignore b/.dvcignore
deleted file mode 100644
index 5197305..0000000
--- a/.dvcignore
+++ /dev/null
@@ -1,3 +0,0 @@
-# Add patterns of files dvc should ignore, which could improve
-# the performance. Learn more at
-# https://dvc.org/doc/user-guide/dvcignore
diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 0385d47..0000000
--- a/.flake8
+++ /dev/null
@@ -1,4 +0,0 @@
-[flake8]
-max-line-length = 120
-ignore = F401, E402, E265, F403, W503, W504, F821, W605
-exclude = .git, venv*, docs, build
\ No newline at end of file
diff --git a/.github/workflows/web-server.yml b/.github/workflows/builds.yml
similarity index 69%
rename from .github/workflows/web-server.yml
rename to .github/workflows/builds.yml
index b644a81..0b77d6d 100644
--- a/.github/workflows/web-server.yml
+++ b/.github/workflows/builds.yml
@@ -1,4 +1,4 @@
-name: web-server
+name: builds
 
 on:
   push:
@@ -7,14 +7,14 @@ on:
     branches: [ master ]
 
 jobs:
-  docker-ready:
+  docker:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - name: Build & run docker
+      - uses: actions/checkout@v4
+      - name: Build & run docker container
        env:
          CDS_UID: ${{ secrets.CDS_UID }}
          CDS_API_KEY: ${{ secrets.CDS_API_KEY }}
        run: PORT=8003 docker-compose up -d --build
-      - name: Ping app inside the container
+      - name: Ping containerized app
        run: sleep 5 && nc -vz localhost 8003
diff --git a/.github/workflows/doc-deploy.yaml b/.github/workflows/doc-deploy.yaml
index 2f4213d..fb20c62 100644
--- a/.github/workflows/doc-deploy.yaml
+++ b/.github/workflows/doc-deploy.yaml
@@ -1,39 +1,27 @@
 name: doc-deploy
 on:
   push:
-    branches: master
+    branches: [master]
 
 jobs:
-  docs-publish:
+  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          persist-credentials: false
-      - name: Set up Python 3.7
+      - name: Set up Python 3.10.5
        uses: actions/setup-python@v1
        with:
-          python-version: 3.7
-          architecture: x64
-      - name: Cache python modules
-        uses: actions/cache@v2
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pkg-deps-${{ hashFiles('requirements.txt') }}-${{ hashFiles('**/*.py') }}
-          restore-keys: |
-            ${{ runner.os }}-pkg-deps-${{ hashFiles('requirements.txt') }}-
-            ${{ runner.os }}-pkg-deps-
-            ${{ runner.os }}-
+          python-version: 3.10.5
+
      - name: Install dependencies
-        run: |
-          sudo apt install libspatialindex-dev python3-rtree
-          python -m pip install --upgrade pip
-          pip install -e .
-          pip install -r docs/requirements.txt
+        run: poetry install
+
      - name: Build documentation
        run: |
-          sphinx-build docs/source docs/build -a -v
+          poetry run sphinx-build docs/source docs/build -a -v
      - name: Install SSH Client 🔑
        uses: webfactory/ssh-agent@v0.4.1
diff --git a/.github/workflows/gh-page.yaml b/.github/workflows/doc-status.yaml
similarity index 81%
rename from .github/workflows/gh-page.yaml
rename to .github/workflows/doc-status.yaml
index 96e3534..22ce1d5 100644
--- a/.github/workflows/gh-page.yaml
+++ b/.github/workflows/doc-status.yaml
@@ -6,11 +6,11 @@ jobs:
   see-page-build-payload:
    runs-on: ubuntu-latest
    steps:
-      - name: Set up Python 3.7
-        uses: actions/setup-python@v1
+      - name: Set up Python 3.10.5
+        uses: actions/setup-python@v4
        with:
-          python-version: 3.7
-          architecture: x64
+          python-version: 3.10.5
+
      - name: check status
        run: |
          import os
diff --git a/.github/workflows/get_today_effis_fwi.yml b/.github/workflows/get_today_effis_fwi.yml
deleted file mode 100644
index c6eca27..0000000
--- a/.github/workflows/get_today_effis_fwi.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: GetTodayEffisFwi
-
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: "0 1 * * *"
-
-jobs:
-  scheduled:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out this repo
-        uses: actions/checkout@v3
-
-      - name: Install dependencies
-        run: pip install -e .
-
-      - name: Get Today Effis Fwi
-        run: python pyro_risks/platform_fwi/main.py
-        env:
-          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          BUCKET_NAME: risk
-          REGION_NAME: gra
-          ENDPOINT_URL: https://s3.gra.io.cloud.ovh.net/
-
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
deleted file mode 100644
index e30c16f..0000000
--- a/.github/workflows/main.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-name: python-package
-
-on:
-  push:
-    branches: [master]
-  pull_request:
-    branches: [master]
-
-jobs:
-
-  flake8-py3:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python 3.7
-        uses: actions/setup-python@v1
-        with:
-          python-version: 3.7
-          architecture: x64
-      - name: Run flake8
-        run: |
-          pip install flake8
-          flake8 --version
-          flake8 ./
-
-  mypy:
-    runs-on: ubuntu-latest
-    name: Mypy
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python 3.7
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.7
-      - name: Run mypy
-        run: |
-          pip install mypy
-          python -m pip install --upgrade pip
-          mypy ./pyro_risks/
-
-  black:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-      - uses: psf/black@stable
diff --git a/.github/workflows/requirements.txt b/.github/workflows/requirements.txt
deleted file mode 100644
index 7dd0fc7..0000000
--- a/.github/workflows/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-coverage>=4.5.4
diff --git a/.github/workflows/scripts.yml b/.github/workflows/scripts.yml
new file mode 100644
index 0000000..e26d178
--- /dev/null
+++ b/.github/workflows/scripts.yml
@@ -0,0 +1,37 @@
+name: Scheduled Scripts
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "0 1 * * *"
+
+jobs:
+  get-effis-fwi:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Setup Python Version
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.10.5
+
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.8.1
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+
+      - name: Install dependencies
+        run: poetry install
+
+      - name: Get Today Effis Fwi
+        run: poetry run python pyrorisks/platform_fwi/main.py
+        env:
+          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          BUCKET_NAME: risk
+          REGION_NAME: gra
+          ENDPOINT_URL: https://s3.gra.io.cloud.ovh.net/
+
diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
new file mode 100644
index 0000000..23d76eb
--- /dev/null
+++ b/.github/workflows/style.yml
@@ -0,0 +1,80 @@
+name: style
+
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
+
+jobs:
+  linting:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Setup Python Version
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.10.5
+
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.8.1
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+
+      - name: Install dependencies
+        run: poetry install
+
+      - name: Run Ruff linter
+        run: |
+          poetry run ruff --version
+          poetry run ruff check --diff .
+
+  formatting:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Setup Python Version
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.10.5
+
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.8.1
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+
+      - name: Install dependencies
+        run: poetry install
+
+      - name: Run Ruff formatter
+        run: |
+          poetry run ruff --version
+          poetry run ruff format --diff .
+
+  typing:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Setup Python Version
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.10.5
+
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.8.1
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+
+      - name: Install dependencies
+        run: poetry install
+
+      - name: Run mypy
+        run: |
+          poetry run mypy --version
+          poetry run mypy app pyrorisks
diff --git a/.gitignore b/.gitignore
index 2d5b3f2..1bacf7f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,7 +8,7 @@ __pycache__/
 
 # Distribution / packaging
 .Python
-build/
+docs/build/
 develop-eggs/
 dist/
 downloads/
diff --git a/Aptfile b/Aptfile
deleted file mode 100644
index 3590f35..0000000
--- a/Aptfile
+++ /dev/null
@@ -1,2 +0,0 @@
-libspatialindex-dev
-python3-rtree
diff --git a/Dockerfile b/Dockerfile
index a4d914b..b9762d7 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,28 +1,19 @@
-FROM python:3.8.1
+FROM python:3.10-buster
 
-# set work directory
-WORKDIR /usr/src/app
+RUN pip install poetry==1.8.1
 
-# set environment variables
-ENV PYTHONDONTWRITEBYTECODE 1
-ENV PYTHONUNBUFFERED 1
+ENV POETRY_NO_INTERACTION=1 \
+    POETRY_VIRTUALENVS_IN_PROJECT=1 \
+    POETRY_VIRTUALENVS_CREATE=1 \
+    POETRY_CACHE_DIR=/tmp/poetry_cache \
+    VIRTUAL_ENV=/app/.venv \
+    PATH="/app/.venv/bin:$PATH"
 
-# copy app requirements
-COPY ./requirements.txt requirements.txt
-COPY ./requirements-app.txt /usr/src/app/requirements-app.txt
-COPY ./setup.py setup.py
-COPY ./README.md README.md
-COPY ./pyro_risks pyro_risks
+WORKDIR /app
 
-# install dependencies
-RUN apt-get update && \
-    apt-get install --no-install-recommends -y libspatialindex-dev python3-rtree && \
-    pip install --upgrade pip setuptools wheel && \
-    pip install -e . && \
-    pip install -r /usr/src/app/requirements-app.txt && \
-    mkdir /usr/src/app/app && \
-    rm -rf /root/.cache/pip && \
-    rm -rf /var/lib/apt/lists/*
+COPY pyrorisks ./pyrorisks
+COPY app ./app
+COPY build ./build
+COPY pyproject.toml poetry.lock README.md ./
 
-# copy project
-COPY app/ /usr/src/app/app/
+RUN poetry install
diff --git a/Procfile b/Procfile
deleted file mode 100644
index 91e5c97..0000000
--- a/Procfile
+++ /dev/null
@@ -1 +0,0 @@
-web: pip install -e . && pip install -r requirements-app.txt && uvicorn --reload --workers 1 --host 0.0.0.0 --port=${PORT:-5000} app.main:app
diff --git a/pyro_risks/__init__.py b/app/__init__.py
similarity index 100%
rename from pyro_risks/__init__.py
rename to app/__init__.py
diff --git a/pyro_risks/utils/__init__.py b/app/api/__init__.py
similarity index 100%
rename from pyro_risks/utils/__init__.py
rename to app/api/__init__.py
diff --git a/app/api/inference.py b/app/api/inference.py
index 990153f..2c89471 100644
--- a/app/api/inference.py
+++ b/app/api/inference.py
@@ -3,10 +3,14 @@
 # This program is licensed under the Apache License version 2.
 # See LICENSE or go to for full license details.
 
-from pyro_risks.models.predict import PyroRisk
-
 __all__ = ["predictor"]
 
 
-predictor = PyroRisk(which="RF")
+class Mock:
+    def predict(self, date):
+        _ = date
+        return {"01": {"score": 0.5, "explainability": "weather"}, "02": {"score": 0.5, "explainability": "weather"}}
+
+
+predictor = Mock()
diff --git a/app/api/routes/__init__.py b/app/api/routes/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/api/routes/risk.py b/app/api/routes/risk.py
index 7035b20..2dff1e9 100644
--- a/app/api/routes/risk.py
+++ b/app/api/routes/risk.py
@@ -20,7 +20,4 @@ async def get_pyrorisk(country: str, date: str):
     """Using the country identifier, this will compute the wildfire risk for all known subregions"""
     preds = predictor.predict(date)
 
-    return [
-        RegionRisk(geocode=k, score=v["score"], explainability=v["explainability"])
-        for k, v in preds.items()
-    ]
+    return [RegionRisk(geocode=k, score=v["score"], explainability=v["explainability"]) for k, v in preds.items()]
diff --git a/app/api/schemas.py b/app/api/schemas.py
index 53552c8..74bdf81 100644
--- a/app/api/schemas.py
+++ b/app/api/schemas.py
@@ -8,6 +8,6 @@
 
 
 class RegionRisk(BaseModel):
-    geocode: str = Field(..., example="01")
-    score: float = Field(..., gt=0, lt=1, example=0.5)
-    explainability: Optional[str] = Field(None, example="weather")
+    geocode: str = Field(..., examples=["01"])
+    score: float = Field(..., gt=0, lt=1, examples=[0.5])
+    explainability: Optional[str] = Field(None, examples=["weather"])
diff --git a/app/main.py b/app/main.py
index 52eef34..21fae27 100644
--- a/app/main.py
+++ b/app/main.py
@@ -47,4 +47,4 @@ def custom_openapi():
     return app.openapi_schema
 
 
-app.openapi = custom_openapi
+app.openapi = custom_openapi  # type: ignore[method-assign]
diff --git a/build/rasterio-1.3.10-cp39-cp39-manylinux2014_x86_64.whl b/build/rasterio-1.3.10-cp39-cp39-manylinux2014_x86_64.whl
new file mode 100644
index 0000000..80b1beb
Binary files /dev/null and b/build/rasterio-1.3.10-cp39-cp39-manylinux2014_x86_64.whl differ
diff --git a/dags/dvc/predict/dvc.lock b/dags/dvc/predict/dvc.lock
deleted file mode 100644
index a91248e..0000000
--- a/dags/dvc/predict/dvc.lock
+++ /dev/null
@@ -1,11 +0,0 @@
-schema: '2.0'
-stages:
-  download:
-    cmd: pyrorisks download inputs --day 2020-05-05
-  predict:
-    cmd: pyrorisks predict --day 2020-05-05
-    outs:
-    - path: .cache/predictions_registry
-      md5: d9926db4f35854aa453bcf54e9aa394e.dir
-      size: 12414168
-      nfiles: 3
diff --git a/dags/dvc/predict/dvc.yaml b/dags/dvc/predict/dvc.yaml
deleted file mode 100644
index 5b2b800..0000000
--- a/dags/dvc/predict/dvc.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-stages:
-  download:
-    wdir: ../../../
-    cmd: pyrorisks download inputs --day 2020-05-05
-
-  predict:
-    wdir: ../../../
-    cmd: pyrorisks predict --day 2020-05-05
-    outs:
-    - .cache/predictions_registry
diff --git a/dags/dvc/train/dvc.lock b/dags/dvc/train/dvc.lock
deleted file mode 100644
index 7209457..0000000
--- a/dags/dvc/train/dvc.lock
+++ /dev/null
@@ -1,61 +0,0 @@
-schema: '2.0'
-stages:
-  download:
-    cmd: pyrorisks download
-    deps:
-    - path: pyro_risks/pipeline/load.py
-      md5: d0d749f7b2c050296d98f23f93783dac
-      size: 2060
-    outs:
-    - path: .cache/data_registry/merged_era_viirs.csv
-      md5: ac964a338a6cdddce2fdd4f1847f173a
-      size: 83716501
-  train_xgboost:
-    cmd: pyrorisks train --model XGBOOST --destination .cache/model_registry
-    deps:
-    - path: .cache/data_registry/merged_era_viirs.csv
-      md5: ac964a338a6cdddce2fdd4f1847f173a
-      size: 83716501
-    - path: pyro_risks/pipeline/train.py
-      md5: 2072ca8311d7bb9105e9210e7cd61b5a
-      size: 5224
-    outs:
-    - path: .cache/model_registry/XGBOOST.joblib
-      md5: a5f6213848af99db0d8030c4e7ac4b59
-      size: 1817148
-  train_rf:
-    cmd: pyrorisks train --model RF --destination .cache/model_registry
-    deps:
-    - path: .cache/data_registry/merged_era_viirs.csv
-      md5: ac964a338a6cdddce2fdd4f1847f173a
-      size: 83716501
-    - path: pyro_risks/pipeline/train.py
-      md5: 2072ca8311d7bb9105e9210e7cd61b5a
-      size: 5224
-    outs:
-    - path: .cache/model_registry/RF.joblib
-      md5: 64a825e6a9ea80345beec0ced6ef4dd4
-      size: 11947436
-  evaluate:
-    cmd: "pyrorisks evaluate --pipeline .cache/model_registry/RF.joblib --threshold\
-      \ .cache/model_registry/RF_threshold.json --prefix RF --destination .cache/metadata_registry\
-      \ \npyrorisks evaluate --pipeline .cache/model_registry/XGBOOST.joblib --threshold\
-      \ .cache/model_registry/XGBOOST_threshold.json --prefix XGBOOST --destination\
-      \ .cache/metadata_registry\n"
-    deps:
-    - path: .cache/model_registry/RF.joblib
-      md5: 64a825e6a9ea80345beec0ced6ef4dd4
-      size: 11947436
-    - path: .cache/model_registry/XGBOOST.joblib
-      md5: a5f6213848af99db0d8030c4e7ac4b59
-      size: 1817148
-    - path: pyro_risks/pipeline/train.py
-      md5: 2072ca8311d7bb9105e9210e7cd61b5a
-      size: 5224
-    outs:
-    - path: .cache/metadata_registry/RF_classification_report.json
-      md5: 47d86b5779e035dc9ff38d38330cd9fd
-      size: 237
-    - path: .cache/metadata_registry/XGBOOST_classification_report.json
-      md5: 137fe7ce77c7dc4fdc948e11885f5557
-      size: 237
diff --git a/dags/dvc/train/dvc.yaml b/dags/dvc/train/dvc.yaml
deleted file mode 100644
index cbd5d17..0000000
--- a/dags/dvc/train/dvc.yaml
+++ /dev/null
@@ -1,36 +0,0 @@
-stages:
-  download:
-    wdir: ../../../
-    cmd: pyrorisks download dataset
-    deps:
-    - pyro_risks/pipeline/load.py
-    outs:
-    - .cache/data_registry/merged_era_viirs.csv
-  train_xgboost:
-    wdir: ../../../
-    cmd: pyrorisks train --model XGBOOST --destination .cache/model_registry
-    deps:
-    - pyro_risks/pipeline/train.py
-    - .cache/data_registry/merged_era_viirs.csv
-    outs:
-    - .cache/model_registry/XGBOOST.joblib
-  train_rf:
-    wdir: ../../../
-    cmd: pyrorisks train --model RF --destination .cache/model_registry
-    deps:
-    - pyro_risks/pipeline/train.py
-    - .cache/data_registry/merged_era_viirs.csv
-    outs:
-    - .cache/model_registry/RF.joblib
-  evaluate:
-    wdir: ../../../
-    cmd: |
-      pyrorisks evaluate --pipeline .cache/model_registry/RF.joblib --threshold .cache/model_registry/RF_threshold.json --prefix RF --destination .cache/metadata_registry
-      pyrorisks evaluate --pipeline .cache/model_registry/XGBOOST.joblib --threshold .cache/model_registry/XGBOOST_threshold.json --prefix XGBOOST --destination .cache/metadata_registry
-    deps:
-    - pyro_risks/pipeline/train.py
-    - .cache/model_registry/XGBOOST.joblib
-    - .cache/model_registry/RF.joblib
-    metrics:
-    - .cache/metadata_registry/XGBOOST_classification_report.json
-    - .cache/metadata_registry/RF_classification_report.json
diff --git a/docker-compose.yml b/docker-compose.yml
index a05ba86..cb1a624 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -5,7 +5,7 @@ services:
    build: .
    command: uvicorn app.main:app --reload --workers 1 --host 0.0.0.0 --port 8000
    volumes:
-      - ./app/:/usr/src/app/app/
+      - ./app/:/app/app/
    ports:
      - ${PORT}:8000
    environment:
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index 6459c56..0000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-sphinx
-sphinx-rtd-theme==0.4.3
-myst-parser==0.12.10
-sphinx-autobuild==2020.9.1
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 44e7639..7f2a3c1 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -20,7 +20,7 @@
 # sys.path.insert(0, os.path.abspath('.'))
 import os
 import sys
-import pyro_risks
+import pyrorisks
 import sphinx_rtd_theme
 from datetime import datetime
@@ -29,17 +29,16 @@
 from sphinx import addnodes
 
-sys.path.insert(0, os.path.abspath("../../pyro_risks"))
 
 # -- Project information -----------------------------------------------------
 
 master_doc = "index"
-project = "pyro_risks"
+project = "pyrorisks"
 copyright = f"{datetime.now().year}, Pyronear Contributors"
 author = "Pyronear Contributors"
 
 # The full version, including alpha/beta/rc tags
-version = pyro_risks.__version__
-release = pyro_risks.__version__ + "-git"
+version = "0.1.0"
+release = "0.1.0" + "-git"
 
 # -- General configuration ---------------------------------------------------
diff --git a/docs/source/index.rst b/docs/source/index.rst
index dfff1bf..9efb834 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -17,7 +17,7 @@ Pyronear Wildfire Risk Forecasting Documentation

 The pyro-risks project aims at providing the pyronear-platform with a machine learning based wildfire forecasting capability.
-The :mod:`pyro_risks` package aggregates pre-processing pipelines and models for wildfire forecasting.
+The :mod:`pyrorisks` package aggregates pre-processing pipelines and models for wildfire forecasting.
 
 
 .. toctree::
@@ -35,17 +35,6 @@ The :mod:`pyro_risks` package aggregates pre-processing pipelines and models for
    overview/datasets/C3S-ECMWF_ERA5T
    overview/datasets/C3S-ECMWF_ERA5LAND
 
-.. toctree::
-   :maxdepth: 1
-   :caption: Pyro Risks Package References
-
-   modules/datasets/modules
-   modules/models/modules
-   modules/pipeline/modules
-
-.. automodule:: pyro_risks
-   :members:
-
 .. toctree::
    :maxdepth: 1
diff --git a/docs/source/modules/datasets/bdiff.rst b/docs/source/modules/datasets/bdiff.rst
deleted file mode 100644
index 79cdd2e..0000000
--- a/docs/source/modules/datasets/bdiff.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-BDIFF module
-=============
-
-.. automodule:: pyro_risks.datasets.wildfires
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/docs/source/modules/datasets/copernicus_era.rst b/docs/source/modules/datasets/copernicus_era.rst
deleted file mode 100644
index 5c4b16d..0000000
--- a/docs/source/modules/datasets/copernicus_era.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Copernicus - ERA5 module
-=========================
-
-.. automodule:: pyro_risks.datasets.ERA5
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/modules/datasets/copernicus_fwi.rst b/docs/source/modules/datasets/copernicus_fwi.rst
deleted file mode 100644
index db02abe..0000000
--- a/docs/source/modules/datasets/copernicus_fwi.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Copernicus - FWI module
-========================
-
-.. automodule:: pyro_risks.datasets.fwi
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/modules/datasets/datasets.rst b/docs/source/modules/datasets/datasets.rst
deleted file mode 100644
index 655d882..0000000
--- a/docs/source/modules/datasets/datasets.rst
+++ /dev/null
@@ -1,34 +0,0 @@
-
-pyro\_risks.datasets.datasets\_mergers module
----------------------------------------------
-
-.. automodule:: pyro_risks.datasets.datasets_mergers
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-
-pyro\_risks.datasets.era\_fwi\_viirs module
--------------------------------------------
-
-.. automodule:: pyro_risks.datasets.era_fwi_viirs
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-
-pyro\_risks.datasets.queries\_api module
-----------------------------------------
-
-.. automodule:: pyro_risks.datasets.queries_api
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-pyro\_risks.datasets.utils module
----------------------------------
-
-.. automodule:: pyro_risks.datasets.utils
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/docs/source/modules/datasets/masks.rst b/docs/source/modules/datasets/masks.rst
deleted file mode 100644
index ace1bbf..0000000
--- a/docs/source/modules/datasets/masks.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Masks module
-============
-
-.. automodule:: pyro_risks.datasets.masks
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/modules/datasets/modules.rst b/docs/source/modules/datasets/modules.rst
deleted file mode 100644
index 3273892..0000000
--- a/docs/source/modules/datasets/modules.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pyro\_risks.datasets
-====================
-
-The datasets subpackage contains modules defining functions for downloading and processing publicly available weather and fire datasets.
-
-The following modules are available:
-
-
-.. toctree::
-   :maxdepth: 4
-
-   masks
-   nasa_firms
-   copernicus_fwi
-   copernicus_era
-   noaa_ncei
-   bdiff
-   datasets
diff --git a/docs/source/modules/datasets/nasa_firms.rst b/docs/source/modules/datasets/nasa_firms.rst
deleted file mode 100644
index d669c95..0000000
--- a/docs/source/modules/datasets/nasa_firms.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-NASA FIRMS - Active Fire module
-===============================
-
-.. automodule:: pyro_risks.datasets.nasa_wildfires
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/modules/datasets/noaa_ncei.rst b/docs/source/modules/datasets/noaa_ncei.rst
deleted file mode 100644
index a633a52..0000000
--- a/docs/source/modules/datasets/noaa_ncei.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-NOAA NCEI module
-=================
-
-.. automodule:: pyro_risks.datasets.weather
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/modules/models/modules.rst b/docs/source/modules/models/modules.rst
deleted file mode 100644
index 29d5929..0000000
--- a/docs/source/modules/models/modules.rst
+++ /dev/null
@@ -1,12 +0,0 @@
-pyro\_risks.models
-==================
-
-The models subpackage contains modules defining steps of the traininig and scoring sklearn pipelines.
-The following modules are available:
-
-.. toctree::
-   :maxdepth: 4
-
-   pipelines
-   transformers
-   utils
\ No newline at end of file
diff --git a/docs/source/modules/models/pipelines.rst b/docs/source/modules/models/pipelines.rst
deleted file mode 100644
index b66c7a5..0000000
--- a/docs/source/modules/models/pipelines.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-Pipelines module
-================
-
-The pipelines module contains the definitions of our scoring pipelines. The risk scoring pipelines are implemented using the `imbalanced-learn `_
-Pipeline allowing for defining sequences of **resampling, preprocessing and modeling steps as one estimators**. See scikit-learn
-`Pipelines and composite estimators `_ for more information.
-
-
-.. literalinclude:: ../../../../pyro_risks/models/pipelines.py
-   :language: PYTHON
\ No newline at end of file
diff --git a/docs/source/modules/models/transformers.rst b/docs/source/modules/models/transformers.rst
deleted file mode 100644
index acd461c..0000000
--- a/docs/source/modules/models/transformers.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-Transformers module
-===================
-
-The pipelines module contains the definitions of our scikit-learn compliant preprocessing steps i.e. transformers.
-Transformers are estimators supporting transform and/or fit_transform methods see `Dataset transformations `_ ,
-`scikit-lego `_ and `feature-engine `_ for collections of transformers.
-
-.. automodule:: pyro_risks.models.transformers
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/modules/models/utils.rst b/docs/source/modules/models/utils.rst
deleted file mode 100644
index 31c9fb5..0000000
--- a/docs/source/modules/models/utils.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Utils module
-============
-
-.. automodule:: pyro_risks.models.utils
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/modules/pipeline/evaluate.rst b/docs/source/modules/pipeline/evaluate.rst
deleted file mode 100644
index b03a7ab..0000000
--- a/docs/source/modules/pipeline/evaluate.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Evaluate module
-===============
-
-.. automodule:: pyro_risks.pipeline.evaluate
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/modules/pipeline/load.rst b/docs/source/modules/pipeline/load.rst
deleted file mode 100644
index 0de015b..0000000
--- a/docs/source/modules/pipeline/load.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Load module
-===============
-
-.. automodule:: pyro_risks.pipeline.load
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/modules/pipeline/modules.rst b/docs/source/modules/pipeline/modules.rst
deleted file mode 100644
index d787d2e..0000000
--- a/docs/source/modules/pipeline/modules.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-pyro\_risks.pipeline
-====================
-
-The pipeline subpackage contains modules defining the helper functions for each stage of the model lifecycle.
-The following modules are available:
-
-.. toctree::
-   :maxdepth: 4
-
-   load
-   train
-   evaluate
-   predict
\ No newline at end of file
diff --git a/docs/source/modules/pipeline/predict.rst b/docs/source/modules/pipeline/predict.rst
deleted file mode 100644
index e967c71..0000000
--- a/docs/source/modules/pipeline/predict.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Predict module
-==============
-
-.. automodule:: pyro_risks.pipeline.predict
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/docs/source/modules/pipeline/train.rst b/docs/source/modules/pipeline/train.rst
deleted file mode 100644
index 91be723..0000000
--- a/docs/source/modules/pipeline/train.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Train module
-============
-
-.. automodule:: pyro_risks.pipeline.train
-   :members:
-   :undoc-members:
-   :show-inheritance:
\ No newline at end of file
diff --git a/mypy.ini b/mypy.ini
deleted file mode 100644
index 18a36f4..0000000
--- a/mypy.ini
+++ /dev/null
@@ -1,76 +0,0 @@
-[mypy]
-files = pyro_risks/*.py
-show_error_codes = True
-pretty = True
-
-[mypy-dotenv]
-ignore_missing_imports = True
-
-[mypy-xarray]
-ignore_missing_imports = True
-
-[mypy-pandas]
-ignore_missing_imports = True
-
-[mypy-sklearn.ensemble]
-ignore_missing_imports = True
-
-[mypy-sklearn.model_selection]
-ignore_missing_imports = True
-
-[mypy-sklearn.metrics]
-ignore_missing_imports = True
-
-[mypy-sklearn.base]
-ignore_missing_imports = True
-
-[mypy-sklearn.impute]
-ignore_missing_imports = True
-
-[mypy-sklearn.utils]
-ignore_missing_imports = True
-
-[mypy-xgboost]
-ignore_missing_imports = True
-
-[mypy-numpy]
-ignore_missing_imports = True
-
-[mypy-geopandas]
-ignore_missing_imports = True
-
-[mypy-cdsapi]
-ignore_missing_imports = True
-
-[mypy-urllib3]
-ignore_missing_imports = True
-
-[mypy-joblib]
-ignore_missing_imports = True
-
-[mypy-imblearn]
-ignore_missing_imports = True
-
-[mypy-imblearn.pipeline]
-ignore_missing_imports = True
-
-[mypy-matplotlib]
-ignore_missing_imports = True
-
-[mypy-matplotlib.pyplot]
-ignore_missing_imports = True
-
-[mypy-plot_metric.functions]
-ignore_missing_imports = True
-
-[mypy-shapely]
-ignore_missing_imports = True
-
-[mypy-shapely.geometry]
-ignore_missing_imports = True
-
-[mypy-scipy]
-ignore_missing_imports = True
-
-[mypy-netCDF4]
-ignore_missing_imports = True
diff --git a/pyro_risks/notebooks/EFFIS FWI formatting.ipynb b/notebooks/EFFIS FWI formatting.ipynb
similarity index 100%
rename from pyro_risks/notebooks/EFFIS FWI formatting.ipynb
rename to notebooks/EFFIS FWI formatting.ipynb
diff --git a/pyro_risks/notebooks/requetes_apis.ipynb b/notebooks/requetes_apis.ipynb
similarity index 100%
rename from pyro_risks/notebooks/requetes_apis.ipynb
rename to notebooks/requetes_apis.ipynb
diff --git a/pyro_risks/notebooks/s3_tutorial.ipynb b/notebooks/s3_tutorial.ipynb
similarity index 100%
rename from pyro_risks/notebooks/s3_tutorial.ipynb
rename to notebooks/s3_tutorial.ipynb
diff --git a/pyro_risks/notebooks/tif_explorer.ipynb b/notebooks/tif_explorer.ipynb
similarity index 100%
rename from pyro_risks/notebooks/tif_explorer.ipynb
rename to notebooks/tif_explorer.ipynb
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..213b6de
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,2467 @@
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
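(The generated lock file that follows is ordinary TOML. As a quick orientation aid — a minimal sketch, assuming only the `[[package]]` name/version layout visible in this hunk, and noting that `tomllib` is standard library from Python 3.11 onward, so the project's pinned 3.10.5 interpreter would need the `tomli` backport — the recorded pins can be listed like so:

```python
# Minimal sketch: enumerate the pinned versions recorded in poetry.lock.
# Assumes only the [[package]] name/version layout shown in this hunk.
import tomllib  # stdlib on Python 3.11+; on 3.10 substitute the `tomli` backport

with open("poetry.lock", "rb") as f:
    lock = tomllib.load(f)

for pkg in lock.get("package", []):
    print(f"{pkg['name']}=={pkg['version']}")
```
)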
+ +[[package]] +name = "affine" +version = "2.4.0" +description = "Matrices describing affine transformation of the plane" +optional = false +python-versions = ">=3.7" +files = [ + {file = "affine-2.4.0-py3-none-any.whl", hash = "sha256:8a3df80e2b2378aef598a83c1392efd47967afec4242021a0b06b4c7cbc61a92"}, + {file = "affine-2.4.0.tar.gz", hash = "sha256:a24d818d6a836c131976d22f8c27b8d3ca32d0af64c1d8d29deb7bafa4da1eea"}, +] + +[package.extras] +dev = ["coveralls", "flake8", "pydocstyle"] +test = ["pytest (>=4.6)", "pytest-cov"] + +[[package]] +name = "alabaster" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.9" +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.4.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "babel" +version = "2.15.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = 
"babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "boto3" +version = "1.34.141" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.34.141-py3-none-any.whl", hash = "sha256:f906c797a02d37a3b88fe4c97e4d72b387e19ab6f3096d2f573578f020fd9bf4"}, + {file = "boto3-1.34.141.tar.gz", hash = "sha256:947c7a94ac3a2131142914a53afc3b1c5a572d6a79515bf2f0473188817cfcd6"}, +] + +[package.dependencies] +botocore = ">=1.34.141,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.34.141" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.34.141-py3-none-any.whl", hash = "sha256:0e661a452c0489b6d62a9c91fed3320d5690a524489a7e50afc8efadb994dba8"}, + {file = "botocore-1.34.141.tar.gz", hash = "sha256:d2815c09037039a287461eddc07af895d798bc897e6ba4b08f5a12eaa9886ff1"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.20.11)"] + +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +optional = false +python-versions = "*" +files = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "cligj" +version = "0.7.2" +description = "Click params for commmand line interfaces to GeoJSON" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, <4" +files = [ + {file = "cligj-0.7.2-py3-none-any.whl", hash = "sha256:c1ca117dbce1fe20a5809dc96f01e1c2840f6dcc939b3ddbb1111bf330ba82df"}, + {file = "cligj-0.7.2.tar.gz", hash = "sha256:a4bc13d623356b373c2c27c53dbd9c68cae5d526270bfa71f6c6fa69669c6b27"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +test = ["pytest-cov"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.5.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, + {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, + {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, + {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, + {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = 
"sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, + {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, + {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, + {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, + {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, + {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, + {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, + {file = 
"coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, + {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, + {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, + {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, + {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "docutils" +version = "0.20.1" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = 
"A robust email address syntax and deliverability validation library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "exceptiongroup" +version = "1.2.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastapi" +version = "0.111.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.111.0-py3-none-any.whl", hash = "sha256:97ecbf994be0bcbdadedf88c3150252bed7b2087075ac99735403b1b76cc8fc0"}, + {file = "fastapi-0.111.0.tar.gz", hash = "sha256:b9db9dd147c91cb8b769f7183535773d8741dd46f9dc6676cd82eab510228cd7"}, +] + +[package.dependencies] +email_validator = ">=2.0.0" +fastapi-cli = ">=0.0.2" +httpx = ">=0.23.0" +jinja2 = ">=2.11.2" +orjson = ">=3.2.1" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +python-multipart = ">=0.0.7" +starlette = ">=0.37.2,<0.38.0" +typing-extensions = ">=4.8.0" +ujson = ">=4.0.1,<4.0.2 || >4.0.2,<4.1.0 || >4.1.0,<4.2.0 || >4.2.0,<4.3.0 || >4.3.0,<5.0.0 || >5.0.0,<5.1.0 || >5.1.0" +uvicorn = {version = ">=0.12.0", extras = ["standard"]} + +[package.extras] +all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "fastapi-cli" +version = "0.0.4" +description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 🚀" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi_cli-0.0.4-py3-none-any.whl", hash = "sha256:a2552f3a7ae64058cdbb530be6fa6dbfc975dc165e4fa66d224c3d396e25e809"}, + {file = "fastapi_cli-0.0.4.tar.gz", hash = "sha256:e2e9ffaffc1f7767f488d6da34b6f5a377751c996f397902eb6abb99a67bde32"}, +] + +[package.dependencies] +typer = ">=0.12.3" + +[package.extras] +standard = ["fastapi", "uvicorn[standard] (>=0.15.0)"] + +[[package]] +name = "filelock" +version = "3.15.4" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "geopandas" +version = "1.0.1" +description = "Geographic pandas extensions" +optional = false +python-versions = ">=3.9" +files = [ + {file = "geopandas-1.0.1-py3-none-any.whl", hash = "sha256:01e147d9420cc374d26f51fc23716ac307f32b49406e4bd8462c07e82ed1d3d6"}, + {file = "geopandas-1.0.1.tar.gz", hash = "sha256:b8bf70a5534588205b7a56646e2082fb1de9a03599651b3d80c99ea4c2ca08ab"}, +] + +[package.dependencies] +numpy = ">=1.22" +packaging = "*" +pandas = ">=1.4.0" +pyogrio = ">=0.7.2" +pyproj = ">=3.3.0" +shapely = ">=2.0.0" + +[package.extras] +all = ["GeoAlchemy2", "SQLAlchemy (>=1.3)", "folium", "geopy", "mapclassify", "matplotlib (>=3.5.0)", "psycopg-binary (>=3.1.0)", "pyarrow (>=8.0.0)", "xyzservices"] +dev = ["black", "codecov", "pre-commit", "pytest (>=3.1.0)", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httptools" +version = "0.6.1" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "identify" +version = "2.6.0" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "linkify-it-py" +version = "2.0.3" +description = "Links recognition library with FULL unicode support." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"}, + {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"}, +] + +[package.dependencies] +uc-micro-py = "*" + +[package.extras] +benchmark = ["pytest", "pytest-benchmark"] +dev = ["black", "flake8", "isort", "pre-commit", "pyproject-flake8"] +doc = ["myst-parser", "sphinx", "sphinx-book-theme"] +test = ["coverage", "pytest", "pytest-cov"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.1" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.1-py3-none-any.whl", hash = "sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a"}, + {file = "mdit_py_plugins-0.4.1.tar.gz", hash = "sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.10.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, + {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, + {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, + {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, + {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, + {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, + {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, + {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, + {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, + {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, + {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, + {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, + {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, + {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, + {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, + {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, + {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, + {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "myst-parser" +version = "3.0.1" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +optional = false +python-versions = ">=3.8" +files = [ + {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, + {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, +] + +[package.dependencies] +docutils = ">=0.18,<0.22" +jinja2 = "*" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4,<1.0" +pyyaml = "*" +sphinx = ">=6,<8" + +[package.extras] +code-style = ["pre-commit (>=3.0,<4.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "sphinx (>=7)", "sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme (>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "numpy" +version = "2.0.0" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2635dbd200c2d6faf2ef9a0d04f0ecc6b13b3cad54f7c67c61155138835515d2"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0a43f0974d501842866cc83471bdb0116ba0dffdbaac33ec05e6afed5b615238"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8d83bb187fb647643bd56e1ae43f273c7f4dbcdf94550d7938cfc32566756514"}, + {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e843d186c8fb1b102bef3e2bc35ef81160ffef3194646a7fdd6a73c6b97196"}, + {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7696c615765091cc5093f76fd1fa069870304beaccfd58b5dcc69e55ef49c1"}, + {file = "numpy-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b4c76e3d4c56f145d41b7b6751255feefae92edbc9a61e1758a98204200f30fc"}, + {file = "numpy-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd3a644e4807e73b4e1867b769fbf1ce8c5d80e7caaef0d90dcdc640dfc9787"}, + {file = "numpy-2.0.0-cp310-cp310-win32.whl", hash = 
"sha256:cee6cc0584f71adefe2c908856ccc98702baf95ff80092e4ca46061538a2ba98"}, + {file = "numpy-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ed08d2703b5972ec736451b818c2eb9da80d66c3e84aed1deeb0c345fefe461b"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad0c86f3455fbd0de6c31a3056eb822fc939f81b1618f10ff3406971893b62a5"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7f387600d424f91576af20518334df3d97bc76a300a755f9a8d6e4f5cadd289"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:34f003cb88b1ba38cb9a9a4a3161c1604973d7f9d5552c38bc2f04f829536609"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b6f6a8f45d0313db07d6d1d37bd0b112f887e1369758a5419c0370ba915b3871"}, + {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f64641b42b2429f56ee08b4f427a4d2daf916ec59686061de751a55aafa22e4"}, + {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7039a136017eaa92c1848152827e1424701532ca8e8967fe480fe1569dae581"}, + {file = "numpy-2.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46e161722e0f619749d1cd892167039015b2c2817296104487cd03ed4a955995"}, + {file = "numpy-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0e50842b2295ba8414c8c1d9d957083d5dfe9e16828b37de883f51fc53c4016f"}, + {file = "numpy-2.0.0-cp311-cp311-win32.whl", hash = "sha256:2ce46fd0b8a0c947ae047d222f7136fc4d55538741373107574271bc00e20e8f"}, + {file = "numpy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd6acc766814ea6443628f4e6751d0da6593dae29c08c0b2606164db026970c"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9"}, + {file = "numpy-2.0.0-cp312-cp312-win32.whl", hash = "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54"}, + {file = "numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e61155fae27570692ad1d327e81c6cf27d535a5d7ef97648a17d922224b216de"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4554eb96f0fd263041baf16cf0881b3f5dafae7a59b1049acb9540c4d57bc8cb"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:903703372d46bce88b6920a0cd86c3ad82dae2dbef157b5fc01b70ea1cfc430f"}, + 
{file = "numpy-2.0.0-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:3e8e01233d57639b2e30966c63d36fcea099d17c53bf424d77f088b0f4babd86"}, + {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cde1753efe513705a0c6d28f5884e22bdc30438bf0085c5c486cdaff40cd67a"}, + {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821eedb7165ead9eebdb569986968b541f9908979c2da8a4967ecac4439bae3d"}, + {file = "numpy-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a1712c015831da583b21c5bfe15e8684137097969c6d22e8316ba66b5baabe4"}, + {file = "numpy-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9c27f0946a3536403efb0e1c28def1ae6730a72cd0d5878db38824855e3afc44"}, + {file = "numpy-2.0.0-cp39-cp39-win32.whl", hash = "sha256:63b92c512d9dbcc37f9d81b123dec99fdb318ba38c8059afc78086fe73820275"}, + {file = "numpy-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f6bed7f840d44c08ebdb73b1825282b801799e325bcbdfa6bc5c370e5aecc65"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9"}, + {file = "numpy-2.0.0.tar.gz", hash = "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864"}, +] + +[[package]] +name = "orjson" +version = "3.10.6" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"}, + {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"}, + {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"}, + {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"}, + {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"}, + {file = 
"orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"}, + {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"}, + {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"}, + {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"}, + {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"}, + {file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"}, + {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"}, + {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"}, + {file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"}, + {file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"}, + {file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"}, + {file = 
"orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"}, + {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"}, + {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"}, + {file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"}, + {file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"}, + {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"}, + {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"}, + {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"}, + {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"}, + {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"}, + {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = 
"pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.7.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pydantic" +version = "2.8.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.20.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = 
"sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyogrio" +version = "0.9.0" +description = "Vectorized spatial vector file format I/O using GDAL/OGR" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyogrio-0.9.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:1a495ca4fb77c69595747dd688f8f17bb7d2ea9cd86603aa71c7fc98cc8b4174"}, + {file = "pyogrio-0.9.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:6dc94a67163218581c7df275223488ac9b31dc582ccd756da607c3338908566c"}, + {file = "pyogrio-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e38c3c6d37cf2cc969407e4d051dcb507cfd948eb26c7b0840c4f7d7d4a71bd4"}, + {file = "pyogrio-0.9.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:f47c9b6818cc0f420015b672d5dcc488530a5ee63e5ba35a184957b21ea3922a"}, + {file = "pyogrio-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb04bd80964428491951766452f0071b0bc37c7d38c45ef02502dbd83e5d74a0"}, + {file = "pyogrio-0.9.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f5d80eb846be4fc4e642cbedc1ed0c143e8d241653382ecc76a7620bbd2a5c3a"}, + {file = "pyogrio-0.9.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:2f2ec57ab74785db9c2bf47c0a6731e5175595a13f8253f06fa84136adb310a9"}, + {file = "pyogrio-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a289584da6df7ca318947301fe0ba9177e7f863f63110e087c80ac5f3658de8"}, + {file = "pyogrio-0.9.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:13642608a1cd67797ae8b5d792b0518d8ef3eb76506c8232ab5eaa1ea1159dff"}, + {file = "pyogrio-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:9440466c0211ac81f3417f274da5903f15546b486f76b2f290e74a56aaf0e737"}, + {file = "pyogrio-0.9.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2e98913fa183f7597c609e774820a149e9329fd2a0f8d33978252fbd00ae87e6"}, + {file = "pyogrio-0.9.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:f8bf193269ea9d347ac3ddada960a59f1ab2e4a5c009be95dc70e6505346b2fc"}, + {file = "pyogrio-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f964002d445521ad5b8e732a6b5ef0e2d2be7fe566768e5075c1d71398da64a"}, + {file = "pyogrio-0.9.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:083351b258b3e08b6c6085dac560bd321b68de5cb4a66229095da68d5f3d696b"}, + {file = "pyogrio-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:796e4f6a4e769b2eb6fea9a10546ea4bdee16182d1e29802b4d6349363c3c1d7"}, + {file = "pyogrio-0.9.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:7fcafed24371fe6e23bcf5abebbb29269f8d79915f1dd818ac85453657ea714a"}, + {file = "pyogrio-0.9.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:30cbeeaedb9bced7012487e7438919aa0c7dfba18ac3d4315182b46eb3139b9d"}, + {file = "pyogrio-0.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4da0b9deb380bd9a200fee13182c4f95b02b4c554c923e2e0032f32aaf1439ed"}, + {file = "pyogrio-0.9.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4e0f90a6c3771ee1f1fea857778b4b6a1b64000d851b819f435f9091b3c38c60"}, + {file = "pyogrio-0.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:959022f3ad04053f8072dc9a2ad110c46edd9e4f92352061ba835fc91df3ca96"}, + {file = 
"pyogrio-0.9.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:2829615cf58b1b24a9f96fea42abedaa1a800dd351c67374cc2f6341138608f3"}, + {file = "pyogrio-0.9.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:17420febc17651876d5140b54b24749aa751d482b5f9ef6267b8053e6e962876"}, + {file = "pyogrio-0.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a2fcaa269031dbbc8ebd91243c6452c5d267d6df939c008ab7533413c9cf92d"}, + {file = "pyogrio-0.9.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:019731a856a9abfe909e86f50eb13f8362f6742337caf757c54b7c8acfe75b89"}, + {file = "pyogrio-0.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:d668cb10f2bf6ccd7c402f91e8b06290722dd09dbe265ae95b2c13db29ebeba0"}, + {file = "pyogrio-0.9.0.tar.gz", hash = "sha256:6a6fa2e8cf95b3d4a7c0fac48bce6e5037579e28d3eb33b53349d6e11f15e5a8"}, +] + +[package.dependencies] +certifi = "*" +numpy = "*" +packaging = "*" + +[package.extras] +benchmark = ["pytest-benchmark"] +dev = ["Cython"] +geopandas = ["geopandas"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyproj" +version = "3.6.1" +description = "Python interface to PROJ (cartographic projections and coordinate transformations library)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pyproj-3.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab7aa4d9ff3c3acf60d4b285ccec134167a948df02347585fdd934ebad8811b4"}, + {file = "pyproj-3.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4bc0472302919e59114aa140fd7213c2370d848a7249d09704f10f5b062031fe"}, + {file = "pyproj-3.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5279586013b8d6582e22b6f9e30c49796966770389a9d5b85e25a4223286cd3f"}, + {file = "pyproj-3.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fafd1f3eb421694857f254a9bdbacd1eb22fc6c24ca74b136679f376f97d35"}, + {file = "pyproj-3.6.1-cp310-cp310-win32.whl", hash = "sha256:c41e80ddee130450dcb8829af7118f1ab69eaf8169c4bf0ee8d52b72f098dc2f"}, + {file = "pyproj-3.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:db3aedd458e7f7f21d8176f0a1d924f1ae06d725228302b872885a1c34f3119e"}, + {file = "pyproj-3.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ebfbdbd0936e178091309f6cd4fcb4decd9eab12aa513cdd9add89efa3ec2882"}, + {file = "pyproj-3.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:447db19c7efad70ff161e5e46a54ab9cc2399acebb656b6ccf63e4bc4a04b97a"}, + {file = "pyproj-3.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7e13c40183884ec7f94eb8e0f622f08f1d5716150b8d7a134de48c6110fee85"}, + {file = "pyproj-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ad699e0c830e2b8565afe42bd58cc972b47d829b2e0e48ad9638386d994915"}, + {file = "pyproj-3.6.1-cp311-cp311-win32.whl", hash = "sha256:8b8acc31fb8702c54625f4d5a2a6543557bec3c28a0ef638778b7ab1d1772132"}, + {file = "pyproj-3.6.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:38a3361941eb72b82bd9a18f60c78b0df8408416f9340521df442cebfc4306e2"}, + {file = "pyproj-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1e9fbaf920f0f9b4ee62aab832be3ae3968f33f24e2e3f7fbb8c6728ef1d9746"}, + {file = "pyproj-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d227a865356f225591b6732430b1d1781e946893789a609bb34f59d09b8b0f8"}, + {file = "pyproj-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83039e5ae04e5afc974f7d25ee0870a80a6bd6b7957c3aca5613ccbe0d3e72bf"}, + {file = "pyproj-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb059ba3bced6f6725961ba758649261d85ed6ce670d3e3b0a26e81cf1aa8d"}, + {file = "pyproj-3.6.1-cp312-cp312-win32.whl", hash = "sha256:2d6ff73cc6dbbce3766b6c0bce70ce070193105d8de17aa2470009463682a8eb"}, + {file = "pyproj-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:7a27151ddad8e1439ba70c9b4b2b617b290c39395fa9ddb7411ebb0eb86d6fb0"}, + {file = "pyproj-3.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ba1f9b03d04d8cab24d6375609070580a26ce76eaed54631f03bab00a9c737b"}, + {file = "pyproj-3.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18faa54a3ca475bfe6255156f2f2874e9a1c8917b0004eee9f664b86ccc513d3"}, + {file = "pyproj-3.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd43bd9a9b9239805f406fd82ba6b106bf4838d9ef37c167d3ed70383943ade1"}, + {file = "pyproj-3.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50100b2726a3ca946906cbaa789dd0749f213abf0cbb877e6de72ca7aa50e1ae"}, + {file = "pyproj-3.6.1-cp39-cp39-win32.whl", hash = "sha256:9274880263256f6292ff644ca92c46d96aa7e57a75c6df3f11d636ce845a1877"}, + {file = "pyproj-3.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:36b64c2cb6ea1cc091f329c5bd34f9c01bb5da8c8e4492c709bda6a09f96808f"}, + {file = "pyproj-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd93c1a0c6c4aedc77c0fe275a9f2aba4d59b8acf88cebfc19fe3c430cfabf4f"}, + {file = "pyproj-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6420ea8e7d2a88cb148b124429fba8cd2e0fae700a2d96eab7083c0928a85110"}, + {file = "pyproj-3.6.1.tar.gz", hash = "sha256:44aa7c704c2b7d8fb3d483bbf75af6cb2350d30a63b144279a09b75fead501bf"}, +] + +[package.dependencies] +certifi = "*" + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.9" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, + {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, +] + +[package.extras] +dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = 
"sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rasterio" +version = "1.3.10" +description = "Fast and 
direct raster I/O for use with Numpy and SciPy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rasterio-1.3.10-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:0bbd62b45a35cab53cb7fe72419e823e47ab31ee2d055af8e21dc7f37fe5ed6c"}, +] + +[package.dependencies] +affine = "*" +attrs = "*" +certifi = "*" +click = ">=4.0" +click-plugins = "*" +cligj = ">=0.5" +numpy = "*" +setuptools = "*" +snuggs = ">=1.4.1" + +[package.extras] +all = ["boto3 (>=1.2.4)", "ghp-import", "hypothesis", "ipython (>=2.0)", "matplotlib", "numpydoc", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely", "sphinx", "sphinx-rtd-theme"] +docs = ["ghp-import", "numpydoc", "sphinx", "sphinx-rtd-theme"] +ipython = ["ipython (>=2.0)"] +plot = ["matplotlib"] +s3 = ["boto3 (>=1.2.4)"] +test = ["boto3 (>=1.2.4)", "hypothesis", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely"] + +[package.source] +type = "file" +url = "build/rasterio-1.3.10-cp39-cp39-manylinux2014_x86_64.whl" + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "ruff" +version = "0.5.1" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.5.1-py3-none-linux_armv6l.whl", hash = "sha256:6ecf968fcf94d942d42b700af18ede94b07521bd188aaf2cd7bc898dd8cb63b6"}, + {file = "ruff-0.5.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:204fb0a472f00f2e6280a7c8c7c066e11e20e23a37557d63045bf27a616ba61c"}, + {file = "ruff-0.5.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d235968460e8758d1e1297e1de59a38d94102f60cafb4d5382033c324404ee9d"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38beace10b8d5f9b6bdc91619310af6d63dd2019f3fb2d17a2da26360d7962fa"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e478d2f09cf06add143cf8c4540ef77b6599191e0c50ed976582f06e588c994"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0368d765eec8247b8550251c49ebb20554cc4e812f383ff9f5bf0d5d94190b0"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3a9a9a1b582e37669b0138b7c1d9d60b9edac880b80eb2baba6d0e566bdeca4d"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdd9f723e16003623423affabcc0a807a66552ee6a29f90eddad87a40c750b78"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:be9fd62c1e99539da05fcdc1e90d20f74aec1b7a1613463ed77870057cd6bd96"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e216fc75a80ea1fbd96af94a6233d90190d5b65cc3d5dfacf2bd48c3e067d3e1"}, + {file = "ruff-0.5.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c4c2112e9883a40967827d5c24803525145e7dab315497fae149764979ac7929"}, + {file = "ruff-0.5.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dfaf11c8a116394da3b65cd4b36de30d8552fa45b8119b9ef5ca6638ab964fa3"}, + {file = "ruff-0.5.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d7ceb9b2fe700ee09a0c6b192c5ef03c56eb82a0514218d8ff700f6ade004108"}, + {file = "ruff-0.5.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:bac6288e82f6296f82ed5285f597713acb2a6ae26618ffc6b429c597b392535c"}, + {file = "ruff-0.5.1-py3-none-win32.whl", hash = "sha256:5c441d9c24ec09e1cb190a04535c5379b36b73c4bc20aa180c54812c27d1cca4"}, + {file = "ruff-0.5.1-py3-none-win_amd64.whl", hash = "sha256:b1789bf2cd3d1b5a7d38397cac1398ddf3ad7f73f4de01b1e913e2abc7dfc51d"}, + {file = "ruff-0.5.1-py3-none-win_arm64.whl", hash = "sha256:2875b7596a740cbbd492f32d24be73e545a4ce0a3daf51e4f4e609962bfd3cd2"}, + {file = "ruff-0.5.1.tar.gz", hash = "sha256:3164488aebd89b1745b47fd00604fb4358d774465f20d1fcd907f9c0fc1b0655"}, +] + +[[package]] +name = "s3transfer" +version = "0.10.2" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.8" +files = [ + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "setuptools" +version = "70.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"}, + {file = 
"setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shapely" +version = "2.0.4" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:011b77153906030b795791f2fdfa2d68f1a8d7e40bce78b029782ade3afe4f2f"}, + {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9831816a5d34d5170aa9ed32a64982c3d6f4332e7ecfe62dc97767e163cb0b17"}, + {file = "shapely-2.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5c4849916f71dc44e19ed370421518c0d86cf73b26e8656192fcfcda08218fbd"}, + {file = "shapely-2.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841f93a0e31e4c64d62ea570d81c35de0f6cea224568b2430d832967536308e6"}, + {file = "shapely-2.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b4431f522b277c79c34b65da128029a9955e4481462cbf7ebec23aab61fc58"}, + {file = "shapely-2.0.4-cp310-cp310-win32.whl", hash = "sha256:92a41d936f7d6743f343be265ace93b7c57f5b231e21b9605716f5a47c2879e7"}, + {file = "shapely-2.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:30982f79f21bb0ff7d7d4a4e531e3fcaa39b778584c2ce81a147f95be1cd58c9"}, + {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de0205cb21ad5ddaef607cda9a3191eadd1e7a62a756ea3a356369675230ac35"}, + {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7d56ce3e2a6a556b59a288771cf9d091470116867e578bebced8bfc4147fbfd7"}, + {file = "shapely-2.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:58b0ecc505bbe49a99551eea3f2e8a9b3b24b3edd2a4de1ac0dc17bc75c9ec07"}, + {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:790a168a808bd00ee42786b8ba883307c0e3684ebb292e0e20009588c426da47"}, + {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4310b5494271e18580d61022c0857eb85d30510d88606fa3b8314790df7f367d"}, + {file = "shapely-2.0.4-cp311-cp311-win32.whl", hash = "sha256:63f3a80daf4f867bd80f5c97fbe03314348ac1b3b70fb1c0ad255a69e3749879"}, + {file = "shapely-2.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:c52ed79f683f721b69a10fb9e3d940a468203f5054927215586c5d49a072de8d"}, + {file = "shapely-2.0.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5bbd974193e2cc274312da16b189b38f5f128410f3377721cadb76b1e8ca5328"}, + {file = "shapely-2.0.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:41388321a73ba1a84edd90d86ecc8bfed55e6a1e51882eafb019f45895ec0f65"}, + {file = "shapely-2.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0776c92d584f72f1e584d2e43cfc5542c2f3dd19d53f70df0900fda643f4bae6"}, + {file = "shapely-2.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c75c98380b1ede1cae9a252c6dc247e6279403fae38c77060a5e6186c95073ac"}, + {file = "shapely-2.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3e700abf4a37b7b8b90532fa6ed5c38a9bfc777098bc9fbae5ec8e618ac8f30"}, + {file = "shapely-2.0.4-cp312-cp312-win32.whl", hash = "sha256:4f2ab0faf8188b9f99e6a273b24b97662194160cc8ca17cf9d1fb6f18d7fb93f"}, + {file = "shapely-2.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:03152442d311a5e85ac73b39680dd64a9892fa42bb08fd83b3bab4fe6999bfa0"}, + {file = "shapely-2.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:994c244e004bc3cfbea96257b883c90a86e8cbd76e069718eb4c6b222a56f78b"}, + {file = "shapely-2.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05ffd6491e9e8958b742b0e2e7c346635033d0a5f1a0ea083547fcc854e5d5cf"}, + {file = "shapely-2.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbdc1140a7d08faa748256438291394967aa54b40009f54e8d9825e75ef6113"}, + {file = "shapely-2.0.4-cp37-cp37m-win32.whl", hash = "sha256:5af4cd0d8cf2912bd95f33586600cac9c4b7c5053a036422b97cfe4728d2eb53"}, + {file = "shapely-2.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:464157509ce4efa5ff285c646a38b49f8c5ef8d4b340f722685b09bb033c5ccf"}, + {file = "shapely-2.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:489c19152ec1f0e5c5e525356bcbf7e532f311bff630c9b6bc2db6f04da6a8b9"}, + {file = "shapely-2.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b79bbd648664aa6f44ef018474ff958b6b296fed5c2d42db60078de3cffbc8aa"}, + {file = "shapely-2.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:674d7baf0015a6037d5758496d550fc1946f34bfc89c1bf247cabdc415d7747e"}, + {file = "shapely-2.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cd4ccecc5ea5abd06deeaab52fcdba372f649728050c6143cc405ee0c166679"}, + {file = "shapely-2.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5cdcbbe3080181498931b52a91a21a781a35dcb859da741c0345c6402bf00c"}, + {file = "shapely-2.0.4-cp38-cp38-win32.whl", hash = "sha256:55a38dcd1cee2f298d8c2ebc60fc7d39f3b4535684a1e9e2f39a80ae88b0cea7"}, + {file = "shapely-2.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:ec555c9d0db12d7fd777ba3f8b75044c73e576c720a851667432fabb7057da6c"}, + {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9103abd1678cb1b5f7e8e1af565a652e036844166c91ec031eeb25c5ca8af0"}, + {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:263bcf0c24d7a57c80991e64ab57cba7a3906e31d2e21b455f493d4aab534aaa"}, + {file = "shapely-2.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddf4a9bfaac643e62702ed662afc36f6abed2a88a21270e891038f9a19bc08fc"}, + {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:485246fcdb93336105c29a5cfbff8a226949db37b7473c89caa26c9bae52a242"}, + {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8de4578e838a9409b5b134a18ee820730e507b2d21700c14b71a2b0757396acc"}, + {file = "shapely-2.0.4-cp39-cp39-win32.whl", hash = "sha256:9dab4c98acfb5fb85f5a20548b5c0abe9b163ad3525ee28822ffecb5c40e724c"}, + {file = 
"shapely-2.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:31c19a668b5a1eadab82ff070b5a260478ac6ddad3a5b62295095174a8d26398"}, + {file = "shapely-2.0.4.tar.gz", hash = "sha256:5dc736127fac70009b8d309a0eeb74f3e08979e530cf7017f2f507ef62e6cfb8"}, +] + +[package.dependencies] +numpy = ">=1.14,<3" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "snuggs" +version = "1.4.7" +description = "Snuggs are s-expressions for Numpy" +optional = false +python-versions = "*" +files = [ + {file = "snuggs-1.4.7-py3-none-any.whl", hash = "sha256:988dde5d4db88e9d71c99457404773dabcc7a1c45971bfbe81900999942d9f07"}, + {file = "snuggs-1.4.7.tar.gz", hash = "sha256:501cf113fe3892e14e2fee76da5cd0606b7e149c411c271898e6259ebde2617b"}, +] + +[package.dependencies] +numpy = "*" +pyparsing = ">=2.1.6" + +[package.extras] +test = ["hypothesis", "pytest"] + +[[package]] +name = "sphinx" +version = "7.3.7" +description = "Python documentation generator" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"}, + {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"}, +] + +[package.dependencies] +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.22" +imagesize = ">=1.3" +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.14" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "importlib_metadata", "mypy (==1.9.0)", "pytest (>=6.0)", "ruff (==0.3.7)", "sphinx-lint", "tomli", "types-docutils", "types-requests"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=6.0)", "setuptools (>=67.0)"] + +[[package]] +name = "sphinx-autobuild" +version = "2024.4.16" +description = "Rebuild Sphinx documentation on changes, with hot reloading in the browser." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx_autobuild-2024.4.16-py3-none-any.whl", hash = "sha256:f2522779d30fcbf0253e09714f274ce8c608cb6ebcd67922b1c54de59faba702"}, + {file = "sphinx_autobuild-2024.4.16.tar.gz", hash = "sha256:1c0ed37a1970eed197f9c5a66d65759e7c4e4cba7b5a5d77940752bf1a59f2c7"}, +] + +[package.dependencies] +colorama = "*" +sphinx = "*" +starlette = ">=0.35" +uvicorn = ">=0.25" +watchfiles = ">=0.20" +websockets = ">=11" + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "sphinx-rtd-theme" +version = "2.0.0" +description = "Read the Docs theme for Sphinx" +optional = false +python-versions = ">=3.6" +files = [ + {file = "sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"}, + {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"}, +] + +[package.dependencies] +docutils = "<0.21" +sphinx = ">=5,<8" +sphinxcontrib-jquery = ">=4,<5" + +[package.extras] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.8" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, + {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.6" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, + {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.5" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, + {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +description = "Extension to include jQuery on newer Sphinx releases" +optional = false +python-versions = ">=2.7" +files = [ + {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, + {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, +] + +[package.dependencies] +Sphinx = ">=1.8" + +[[package]] +name = "sphinxcontrib-jsmath" +version = 
"1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.7" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, + {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.10" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, + {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "starlette" +version = "0.37.2" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, + {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typer" +version = "0.12.3" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, + {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "uc-micro-py" +version = "1.0.3" +description = "Micro subset of unicode data files for linkify-it-py projects." +optional = false +python-versions = ">=3.7" +files = [ + {file = "uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a"}, + {file = "uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5"}, +] + +[package.extras] +test = ["coverage", "pytest", "pytest-cov"] + +[[package]] +name = "ujson" +version = "5.10.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, + {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, + {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, + {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, + {file = 
"ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, + {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, + {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, + {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, + {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, + {file = 
"ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, + {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, + {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, + {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, + {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, + {file = 
"ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, + {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, + {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, + {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with 
thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.25.0" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.25.0-py3-none-any.whl", hash = "sha256:ce107f5d9bd02b4636001a77a4e74aab5e1e2b146868ebbad565237145af444c"}, + {file = "uvicorn-0.25.0.tar.gz", hash = "sha256:6dddbad1d7ee0f5140aba5ec138ddc9612c5109399903828b4874c9937f009c2"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.19.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, + {file = 
"uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, + {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, +] + +[package.extras] +docs = ["Sphinx 
(>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "virtualenv" +version = "20.26.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "watchfiles" +version = "0.22.0" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchfiles-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:da1e0a8caebf17976e2ffd00fa15f258e14749db5e014660f53114b676e68538"}, + {file = "watchfiles-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61af9efa0733dc4ca462347becb82e8ef4945aba5135b1638bfc20fad64d4f0e"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9188979a58a096b6f8090e816ccc3f255f137a009dd4bbec628e27696d67c1"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bdadf6b90c099ca079d468f976fd50062905d61fae183f769637cb0f68ba59a"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:067dea90c43bf837d41e72e546196e674f68c23702d3ef80e4e816937b0a3ffd"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf8a20266136507abf88b0df2328e6a9a7c7309e8daff124dda3803306a9fdb"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1235c11510ea557fe21be5d0e354bae2c655a8ee6519c94617fe63e05bca4171"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2444dc7cb9d8cc5ab88ebe792a8d75709d96eeef47f4c8fccb6df7c7bc5be71"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c5af2347d17ab0bd59366db8752d9e037982e259cacb2ba06f2c41c08af02c39"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9624a68b96c878c10437199d9a8b7d7e542feddda8d5ecff58fdc8e67b460848"}, + {file = "watchfiles-0.22.0-cp310-none-win32.whl", hash = "sha256:4b9f2a128a32a2c273d63eb1fdbf49ad64852fc38d15b34eaa3f7ca2f0d2b797"}, + {file = "watchfiles-0.22.0-cp310-none-win_amd64.whl", hash = "sha256:2627a91e8110b8de2406d8b2474427c86f5a62bf7d9ab3654f541f319ef22bcb"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:8c39987a1397a877217be1ac0fb1d8b9f662c6077b90ff3de2c05f235e6a8f96"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a927b3034d0672f62fb2ef7ea3c9fc76d063c4b15ea852d1db2dc75fe2c09696"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052d668a167e9fc345c24203b104c313c86654dd6c0feb4b8a6dfc2462239249"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e45fb0d70dda1623a7045bd00c9e036e6f1f6a85e4ef2c8ae602b1dfadf7550"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c49b76a78c156979759d759339fb62eb0549515acfe4fd18bb151cc07366629c"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a65474fd2b4c63e2c18ac67a0c6c66b82f4e73e2e4d940f837ed3d2fd9d4da"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc0cba54f47c660d9fa3218158b8963c517ed23bd9f45fe463f08262a4adae1"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ebe84a035993bb7668f58a0ebf998174fb723a39e4ef9fce95baabb42b787f"}, + {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e0f0a874231e2839abbf473256efffe577d6ee2e3bfa5b540479e892e47c172d"}, + {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:213792c2cd3150b903e6e7884d40660e0bcec4465e00563a5fc03f30ea9c166c"}, + {file = "watchfiles-0.22.0-cp311-none-win32.whl", hash = "sha256:b44b70850f0073b5fcc0b31ede8b4e736860d70e2dbf55701e05d3227a154a67"}, + {file = "watchfiles-0.22.0-cp311-none-win_amd64.whl", hash = "sha256:00f39592cdd124b4ec5ed0b1edfae091567c72c7da1487ae645426d1b0ffcad1"}, + {file = "watchfiles-0.22.0-cp311-none-win_arm64.whl", hash = "sha256:3218a6f908f6a276941422b035b511b6d0d8328edd89a53ae8c65be139073f84"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c7b978c384e29d6c7372209cbf421d82286a807bbcdeb315427687f8371c340a"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd4c06100bce70a20c4b81e599e5886cf504c9532951df65ad1133e508bf20be"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:425440e55cd735386ec7925f64d5dde392e69979d4c8459f6bb4e920210407f2"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68fe0c4d22332d7ce53ad094622b27e67440dacefbaedd29e0794d26e247280c"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a31bfd98f846c3c284ba694c6365620b637debdd36e46e1859c897123aa232"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2e8fe41f3cac0660197d95216c42910c2b7e9c70d48e6d84e22f577d106fc1"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b7cc10261c2786c41d9207193a85c1db1b725cf87936df40972aab466179b6"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28585744c931576e535860eaf3f2c0ec7deb68e3b9c5a85ca566d69d36d8dd27"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00095dd368f73f8f1c3a7982a9801190cc88a2f3582dd395b289294f8975172b"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:52fc9b0dbf54d43301a19b236b4a4614e610605f95e8c3f0f65c3a456ffd7d35"}, + {file = "watchfiles-0.22.0-cp312-none-win32.whl", hash = "sha256:581f0a051ba7bafd03e17127735d92f4d286af941dacf94bcf823b101366249e"}, + {file = "watchfiles-0.22.0-cp312-none-win_amd64.whl", hash = "sha256:aec83c3ba24c723eac14225194b862af176d52292d271c98820199110e31141e"}, + {file = "watchfiles-0.22.0-cp312-none-win_arm64.whl", hash = "sha256:c668228833c5619f6618699a2c12be057711b0ea6396aeaece4ded94184304ea"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d47e9ef1a94cc7a536039e46738e17cce058ac1593b2eccdede8bf72e45f372a"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28f393c1194b6eaadcdd8f941307fc9bbd7eb567995232c830f6aef38e8a6e88"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd64f3a4db121bc161644c9e10a9acdb836853155a108c2446db2f5ae1778c3d"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2abeb79209630da981f8ebca30a2c84b4c3516a214451bfc5f106723c5f45843"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc382083afba7918e32d5ef12321421ef43d685b9a67cc452a6e6e18920890e"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d048ad5d25b363ba1d19f92dcf29023988524bee6f9d952130b316c5802069cb"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:103622865599f8082f03af4214eaff90e2426edff5e8522c8f9e93dc17caee13"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e1f3cf81f1f823e7874ae563457828e940d75573c8fbf0ee66818c8b6a9099"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8597b6f9dc410bdafc8bb362dac1cbc9b4684a8310e16b1ff5eee8725d13dcd6"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b04a2cbc30e110303baa6d3ddce8ca3664bc3403be0f0ad513d1843a41c97d1"}, + {file = "watchfiles-0.22.0-cp38-none-win32.whl", hash = "sha256:b610fb5e27825b570554d01cec427b6620ce9bd21ff8ab775fc3a32f28bba63e"}, + {file = "watchfiles-0.22.0-cp38-none-win_amd64.whl", hash = "sha256:fe82d13461418ca5e5a808a9e40f79c1879351fcaeddbede094028e74d836e86"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3973145235a38f73c61474d56ad6199124e7488822f3a4fc97c72009751ae3b0"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:280a4afbc607cdfc9571b9904b03a478fc9f08bbeec382d648181c695648202f"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0d883351a34c01bd53cfa75cd0292e3f7e268bacf2f9e33af4ecede7e21d1d"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9165bcab15f2b6d90eedc5c20a7f8a03156b3773e5fb06a790b54ccecdb73385"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc1b9b56f051209be458b87edb6856a449ad3f803315d87b2da4c93b43a6fe72"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc1fc25a1dedf2dd952909c8e5cb210791e5f2d9bc5e0e8ebc28dd42fed7562"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc92d2d2706d2b862ce0568b24987eba51e17e14b79a1abcd2edc39e48e743c8"}, + {file = 
"watchfiles-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97b94e14b88409c58cdf4a8eaf0e67dfd3ece7e9ce7140ea6ff48b0407a593ec"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96eec15e5ea7c0b6eb5bfffe990fc7c6bd833acf7e26704eb18387fb2f5fd087"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:28324d6b28bcb8d7c1041648d7b63be07a16db5510bea923fc80b91a2a6cbed6"}, + {file = "watchfiles-0.22.0-cp39-none-win32.whl", hash = "sha256:8c3e3675e6e39dc59b8fe5c914a19d30029e36e9f99468dddffd432d8a7b1c93"}, + {file = "watchfiles-0.22.0-cp39-none-win_amd64.whl", hash = "sha256:25c817ff2a86bc3de3ed2df1703e3d24ce03479b27bb4527c57e722f8554d971"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b810a2c7878cbdecca12feae2c2ae8af59bea016a78bc353c184fa1e09f76b68"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"}, + {file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "websockets" +version = "12.0" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = 
"websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = 
"websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = 
"websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "c7cf019397dbc2fd8a50d93c15fde94b7231c1a69324f16cc317a37c6d871fb8" diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 0000000..efa46ec --- /dev/null +++ b/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..b5a6d4f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,109 @@ +[tool.poetry] +name = "pyrorisks" +version = "0.1.0" +description = "Data pre-processing pipelines and models for wildfire forecasting and monitoring" +authors = ["Pyronear "] +license = "Apache-2.0" +readme = "README.md" +packages = [{include = "pyrorisks"}, {include = "app"}] +classifiers=[ + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Mathematics", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Software Development", + "Topic :: Software Development :: Libraries", + "Topic :: Software Development :: Libraries :: Python Modules", +] +keywords=["data science", "time series", "machine learning"] + + +[tool.poetry.dependencies] +python = "^3.10" +requests = "^2.31.0" +geopandas = "1.0.1" +boto3 = "^1.28.62" +shapely = "^2.0.4" +rasterio = [{path = "./build/rasterio-1.3.10-cp39-cp39-manylinux2014_x86_64.whl", platform = "linux", python = ">=3.10 <3.12"}] + + +[tool.poetry.group.dev.dependencies] +pytest = "^7.4.2" +pre-commit = "^3.4.0" +mypy = "^1.10.0" +ruff = "^0.5.1" +pytest-cov = "^5.0.0" + + +[tool.poetry.group.app.dependencies] +fastapi = "^0.111.0" +uvicorn = "^0.25" + + +[tool.poetry.group.docs.dependencies] +sphinx = "^7.3.5" +sphinx-rtd-theme = "^2.0.0" +myst-parser = "^3.0.1" +sphinx-autobuild = "^2024.4.16" +jinja2 = "^3.1.4" +linkify-it-py = "^2.0.3" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +lint.ignore = [ + "F401", # line too long, handled by black + "E402", # do not perform function calls in argument defaults + "E265", # raise from + "F403", # too complex + "F821", # list comprehension to list() + "W605", # list comprehension to list() +] +exclude = [".git", "venv", "docs", "build"] +line-length = 120 +target-version = "py310" +preview = true + +[tool.mypy] +python_version = "3.10" +files = ["pyrorisks/**/*.py", "app/**/*.py"] +show_error_codes = true +pretty = true + +[[tool.mypy.overrides]] +module = [ + "dotenv", + "xarray", + "pandas", + "numpy", + "geopandas", + "shapely", + "shapely.geometry", + "rasterio", 
+ "cdsapi", + "urllib3", + "joblib", + "matplotlib", + "scipy", + "netCDF4", + "pyrorisks", + "requests", + "boto3", + "pyro_risks.utils.fwi_helpers", + "pyro_risks.utils.s3" +] +ignore_missing_imports = true + +[tool.black] +line-length = 120 +target-version = ['py310'] \ No newline at end of file diff --git a/pyro_risks/config/__init__.py b/pyro_risks/config/__init__.py deleted file mode 100644 index 4e943a6..0000000 --- a/pyro_risks/config/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .datasets import * -from .models import * diff --git a/pyro_risks/config/datasets.py b/pyro_risks/config/datasets.py deleted file mode 100644 index 158c181..0000000 --- a/pyro_risks/config/datasets.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import os -from dotenv import load_dotenv - -# If there is an .env, load it -load_dotenv() - -FR_GEOJSON: str = "https://france-geojson.gregoiredavid.fr/repo/departements.geojson" -DATA_FALLBACK: str = ( - "https://github.com/pyronear/pyro-risks/releases/download/v0.1.0-data" -) -FR_GEOJSON_FALLBACK: str = f"{DATA_FALLBACK}/departements.geojson" -FR_FIRES_FALLBACK: str = f"{DATA_FALLBACK}/export_BDIFF_incendies_20201027.csv" -FR_WEATHER_FALLBACK: str = f"{DATA_FALLBACK}/noaa_weather_20201025.csv" -FR_NASA_FIRMS_FALLBACK: str = f"{DATA_FALLBACK}/NASA_FIRMS.json" -FR_NASA_VIIRS_FALLBACK: str = f"{DATA_FALLBACK}/NASA_FIRMS_VIIRS_2018_2020.csv" -FR_FWI_2019_FALLBACK: str = f"{DATA_FALLBACK}/JRC_FWI_2019.zip" -FR_FWI_2020_FALLBACK: str = f"{DATA_FALLBACK}/JRC_FWI_2020.zip" -FR_ERA5LAND_FALLBACK: str = f"{DATA_FALLBACK}/ERA5_2019.nc" -FR_ERA5T_FALLBACK: str = f"{DATA_FALLBACK}/era5t_2019.nc" -DATASET: str = "merged_era_viirs.csv" -ERA5T_VIIRS_PIPELINE: str = f"{DATA_FALLBACK}/merged_era_viirs.csv" -TEST_FR_ERA5LAND_FALLBACK: str = f"{DATA_FALLBACK}/test_data_ERA5_2018.nc" -TEST_FR_FIRMS_CSV_FALLBACK: str = f"{DATA_FALLBACK}/test_data_FIRMS.csv" -TEST_FR_FIRMS_XLSX_FALLBACK: str = f"{DATA_FALLBACK}/test_data_FIRMS.xlsx" -TEST_FR_VIIRS_XLSX_FALLBACK: str = f"{DATA_FALLBACK}/test_data_VIIRS.xlsx" -TEST_FR_VIIRS_JSON_FALLBACK: str = f"{DATA_FALLBACK}/test_data_VIIRS.json" -TEST_FR_ERA5_2019_FALLBACK: str = f"{DATA_FALLBACK}/test_data_ERA5_2019.nc" -TEST_FR_ERA5T_FALLBACK: str = f"{DATA_FALLBACK}/test_era5t_to_merge.nc" -TEST_FWI_FALLBACK: str = f"{DATA_FALLBACK}/test_data_FWI.csv" -TEST_FWI_TO_PREDICT: str = f"{DATA_FALLBACK}/fwi_test_to_predict.csv" -TEST_ERA_TO_PREDICT: str = f"{DATA_FALLBACK}/era_test_to_predict.csv" - -REPO_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")) -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../")) - - -CDS_URL = "https://cds.climate.copernicus.eu/api/v2" -CDS_UID = os.getenv("CDS_UID") -CDS_API_KEY = os.getenv("CDS_API_KEY") - -RFMODEL_PATH_FALLBACK: str = f"{DATA_FALLBACK}/pyrorisk_rfc_111220.pkl" -RFMODEL_ERA5T_PATH_FALLBACK: str = f"{DATA_FALLBACK}/pyrorisk_rfc_era5t_151220.pkl" -XGBMODEL_PATH_FALLBACK: str = f"{DATA_FALLBACK}/pyrorisk_xgb_091220.pkl" -XGBMODEL_ERA5T_PATH_FALLBACK: str = f"{DATA_FALLBACK}/pyrorisk_xgb_era5t_151220.pkl" - -FWI_VARS = ["fwi", "ffmc", "dmc", "dc", "isi", "bui", "dsr"] - -WEATHER_VARS = [ - "u10", - "v10", - "d2m", - "t2m", - "fal", - "lai_hv", - "lai_lv", - "skt", - "asn", - "snowc", - "rsn", - "sde", - "sd", - "sf", - "smlt", - "stl1", - "stl2", - "stl3", - "stl4", - "slhf", - "ssr", - "str", - "sp", - "sshf", - "ssrd", 
- "strd", - "tsn", - "tp", -] - -WEATHER_ERA5T_VARS = [ - "asn", - "d2m", - "e", - "es", - "fal", - "lai_hv", - "lai_lv", - "lblt", - "licd", - "lict", - "lmld", - "lmlt", - "lshf", - "ltlt", - "pev", - "ro", - "rsn", - "sd", - "sf", - "skt", - "slhf", - "smlt", - "sp", - "src", - "sro", - "sshf", - "ssr", - "ssrd", - "ssro", - "stl1", - "stl2", - "stl3", - "stl4", - "str", - "strd", - "swvl1", - "swvl2", - "swvl3", - "swvl4", - "t2m", - "tp", - "tsn", - "u10", - "v10", -] - -CACHE_FOLDER: str = "./.cache/" - -DATA_REGISTRY = os.path.join(CACHE_FOLDER, "data_registry/") -MODEL_REGISTRY = os.path.join(CACHE_FOLDER, "model_registry/") -METADATA_REGISTRY = os.path.join(CACHE_FOLDER, "metadata_registry/") -PREDICTIONS_REGISTRY = os.path.join(CACHE_FOLDER, "predictions_registry/") - -DATASET_PATH = os.path.join(DATA_REGISTRY, DATASET) -PIPELINE_INPUT_PATH = os.path.join(PREDICTIONS_REGISTRY, "pipeline_inputs.csv") -RFMODEL_ERA5T_PATH = os.path.join(MODEL_REGISTRY, "RF.joblib") -XGBMODEL_ERA5T_PATH = os.path.join(MODEL_REGISTRY, "XGBOOST.joblib") - -os.makedirs(CACHE_FOLDER, exist_ok=True) -os.makedirs(DATA_REGISTRY, exist_ok=True) -os.makedirs(MODEL_REGISTRY, exist_ok=True) -os.makedirs(METADATA_REGISTRY, exist_ok=True) -os.makedirs(PREDICTIONS_REGISTRY, exist_ok=True) diff --git a/pyro_risks/config/models.py b/pyro_risks/config/models.py deleted file mode 100644 index d78fe1a..0000000 --- a/pyro_risks/config/models.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -ZONE_VAR = "departement" - -DATE_VAR = "day" - -TARGET = "fires" - -PIPELINE_ERA5T_VARS = [ - "strd_min", - "isi_min", - "strd_max", - "d2m_mean", - "lai_hv_mean", - "str_mean", - "ffmc_mean", - "strd_mean", - "swvl1_mean", - "asn_min", - "fwi_mean", - "asn_std", - "ssr_mean", - "str_max", - "d2m_min", - "rsn_std", - "ssrd_min", - "isi_mean", - "ssrd_mean", - "isi_max", - "ffmc_max", - "ffmc_min", - "ssr_min", - "str_min", - "ffmc_std", -] - -MODEL_ERA5T_VARS = [ - "str_max", - "str_mean", - "ffmc_min", - "str_min", - "ffmc_mean", - "str_mean_lag1", - "str_max_lag1", - "str_min_lag1", - "isi_min", - "ffmc_min_lag1", - "isi_mean", - "ffmc_mean_lag1", - "ffmc_std", - "ffmc_max", - "isi_min_lag1", - "isi_mean_lag1", - "ffmc_max_lag1", - "asn_std", - "strd_max", - "ssrd_min", - "strd_mean", - "isi_max", - "strd_min", - "d2m_min", - "asn_min", - "ssr_min", - "ffmc_min_lag3", - "ffmc_std_lag1", - "lai_hv_mean_lag7", - "str_max_lag3", - "str_mean_lag3", - "rsn_std_lag1", - "fwi_mean", - "ssr_mean", - "ssrd_mean", - "swvl1_mean", - "rsn_std_lag3", - "isi_max_lag1", - "d2m_mean", - "rsn_std", -] - -SELECTED_DEP = [ - "Aisne", - "Alpes-Maritimes", - "Ardèche", - "Ariège", - "Aude", - "Aveyron", - "Cantal", - "Eure", - "Eure-et-Loir", - "Gironde", - "Haute-Corse", - "Hautes-Pyrénées", - "Hérault", - "Indre", - "Landes", - "Loiret", - "Lozère", - "Marne", - "Oise", - "Pyrénées-Atlantiques", - "Pyrénées-Orientales", - "Sarthe", - "Somme", - "Yonne", -] - -LAG_ERA5T_VARS = ["_".join(x.split("_")[:-1]) for x in MODEL_ERA5T_VARS if "_lag" in x] - -USECOLS = [DATE_VAR, ZONE_VAR, TARGET] + PIPELINE_ERA5T_VARS - -PIPELINE_COLS = [DATE_VAR, ZONE_VAR] + PIPELINE_ERA5T_VARS - -TEST_SIZE = 0.2 - -RANDOM_STATE = 42 - -RF_PARAMS = { - "n_estimators": 500, - "min_samples_leaf": 10, - "max_features": "sqrt", - "class_weight": "balanced", - "criterion": "gini", - "random_state": 10, - "n_jobs": -1, - "verbose": 3, -} - 
-XGB_PARAMS = { - "n_estimators": 1000, - "max_depth": 10, - "learning_rate": 0.01, - "min_child_weight": 10, - "subsample": 0.8, - "colsample_bytree": 0.8, - "objective": "binary:logistic", - "random_state": 10, - "n_jobs": -1, - "verbosity": 2, -} - - -XGB_FIT_PARAMS = {"early_stopping_rounds": 50, "eval_metric": ["logloss", "aucpr"]} diff --git a/pyro_risks/datasets/ERA5.py b/pyro_risks/datasets/ERA5.py deleted file mode 100644 index 1b3a7b2..0000000 --- a/pyro_risks/datasets/ERA5.py +++ /dev/null @@ -1,218 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import logging -from typing import Optional - -import os -import geopandas as gpd -import pandas as pd -import numpy as np -import requests -import xarray as xr -import tempfile - -from pyro_risks import config as cfg -from .masks import get_french_geom -from pyro_risks.datasets.queries_api import call_era5land, call_era5t - - -__all__ = ["ERA5Land", "ERA5T"] - - -def get_data_era5land_for_predict(date: str) -> pd.DataFrame: - """ - Get ERA5Land dataframe for given date using call to cdsapi - and appropriate class. - - Args: - date: str - Date with the following format: "YEAR-MONTH-DAY" eg. "2020-05-12" - - Returns: pd.DataFrame - Dataframe containing ERA5 Land data for the requested day. - """ - with tempfile.TemporaryDirectory() as tmp: - year, month, day = date.split("-") - call_era5land(tmp, year, month, day) - # TODO: make sure that the directory works when on server - data = ERA5Land( - source_path=os.path.join(tmp, f"era5land_{year}_{month}_{day}.nc") - ) - - # Lag J-1 - lag = np.datetime64(date) - np.timedelta64(1, "D") - year, month, day = str(lag).split("-") - call_era5land(tmp, year, month, day) - dataJ1 = ERA5Land( - source_path=os.path.join(tmp, f"era5land_{year}_{month}_{day}.nc") - ) - - # Lag J-3 - lag = np.datetime64(date) - np.timedelta64(3, "D") - year, month, day = str(lag).split("-") - call_era5land(tmp, year, month, day) - dataJ3 = ERA5Land( - source_path=os.path.join(tmp, f"era5land_{year}_{month}_{day}.nc") - ) - - # Lag J-7 - lag = np.datetime64(date) - np.timedelta64(7, "D") - year, month, day = str(lag).split("-") - call_era5land(tmp, year, month, day) - dataJ7 = ERA5Land( - source_path=os.path.join(tmp, f"era5land_{year}_{month}_{day}.nc") - ) - - merged_data = pd.concat([data, dataJ1, dataJ3, dataJ7], ignore_index=True) - return merged_data - - -def get_data_era5t_for_predict(date: str) -> pd.DataFrame: - """ - Get ERA5T dataframe for given date using call to cdsapi - and appropriate class. - - Args: - date: str - Date with the following format: "YEAR-MONTH-DAY" eg. "2020-05-12" - - Returns: pd.DataFrame - Dataframe containing ERA5T data for the requested day.
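# A usage sketch (illustrative, not part of the diff; requires valid CDS credentials
# in the environment): both *_for_predict helpers take an ISO "YEAR-MONTH-DAY" string
# and transparently fetch the J-1 / J-3 / J-7 lagged days used as model features.
from pyro_risks.datasets.ERA5 import get_data_era5t_for_predict  # module deleted in this diff

df = get_data_era5t_for_predict("2020-05-12")  # also pulls 2020-05-11, 2020-05-09 and 2020-05-05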
- """ - with tempfile.TemporaryDirectory() as tmp: - year, month, day = date.split("-") - call_era5t(tmp, year, month, day) - # TODO: make sure that the directory works when on server - data = ERA5T(source_path=os.path.join(tmp, f"era5t_{year}_{month}_{day}.nc")) - # Lag J-1 - lag = np.datetime64(f"{year}-{month}-{day}") - np.timedelta64(1, "D") - year, month, day = str(lag).split("-") - call_era5t(tmp, year, month, day) - dataJ1 = ERA5T(source_path=os.path.join(tmp, f"era5t_{year}_{month}_{day}.nc")) - # Lag J-3 - lag = np.datetime64(f"{year}-{month}-{day}") - np.timedelta64(3, "D") - year, month, day = str(lag).split("-") - call_era5t(tmp, year, month, day) - dataJ3 = ERA5T(source_path=os.path.join(tmp, f"era5t_{year}_{month}_{day}.nc")) - # Lag J-7 - lag = np.datetime64(f"{year}-{month}-{day}") - np.timedelta64(7, "D") - year, month, day = str(lag).split("-") - call_era5t(tmp, year, month, day) - dataJ7 = ERA5T(source_path=os.path.join(tmp, f"era5t_{year}_{month}_{day}.nc")) - merged_data = pd.concat([data, dataJ1, dataJ3, dataJ7], ignore_index=True) - return merged_data - - -class ERA5Land(pd.DataFrame): - """Provides ERA5-Land clean dataset as a pandas dataframe. - - ERA5-Land is a reanalysis dataset providing a consistent view of the evolution of land variables - over several decades at an enhanced resolution compared to ERA5. ERA5-Land uses as input to - control the simulated land fields ERA5 atmospheric variables, such as air temperature and air humidity. - Using cdaspi https://pypi.org/project/cdsapi/ with access key, the user can get the dataset - at https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-era5-land?tab=overview - - The provided dataset has to be in netCDF4 format here. - - Args: - source_path: str - Path or URL to your version of the source data - """ - - def __init__(self, source_path: Optional[str] = None) -> None: - """ - Args: - source_path: Optional[str] - Path or URL to your version of the source data - """ - if not isinstance(source_path, str): - # Download in cache - logging.warning( - f"No data source specified for {self.__class__.__name__}, trying fallback." - ) - source_path = cfg.FR_ERA5LAND_FALLBACK - - if source_path.startswith("http"): - with requests.get(source_path) as resp: - ds = xr.open_dataset(resp.content) - data = ds.to_dataframe() - else: - ds = xr.open_dataset(source_path) - data = ds.to_dataframe() - - # Drop NaNs which correspond to no land - data = data.dropna() - data = data.reset_index() - - data["time"] = pd.to_datetime( - data["time"], format="%Y-%m-%d %H:%M:%S", errors="coerce" - ) - data["time"] = data["time"].dt.normalize() - - # Transform into geopandas dataframe - geo_data = gpd.GeoDataFrame( - data, - geometry=gpd.points_from_xy(data["longitude"], data["latitude"]), - crs="EPSG:4326", - ) - - # Match the polygons using the ones of each predefined country area - geo_masks = get_french_geom() - geo_df = gpd.sjoin(geo_masks, geo_data, how="inner") - super().__init__(geo_df.drop(["index_right", "geometry"], axis=1)) - - -class ERA5T(pd.DataFrame): - """Provides ERA5T clean dataset as a pandas dataframe. - - The provided dataset has to be in netCDF4 format here. 
- - Args: - source_path: str - Path or URL to your version of the source data - """ - - def __init__(self, source_path: Optional[str] = None) -> None: - """ - Args: - source_path: Optional[str] - Path or URL to your version of the source data - """ - if not isinstance(source_path, str): - # Download in cache - logging.warning( - f"No data source specified for {self.__class__.__name__}, trying fallback." - ) - source_path = cfg.FR_ERA5T_FALLBACK - - if source_path.startswith("http"): - with requests.get(source_path) as resp: - ds = xr.open_dataset(resp.content) - data = ds.to_dataframe() - else: - ds = xr.open_dataset(source_path) - data = ds.to_dataframe() - - # Drop columns with NaNs - data = data.dropna(axis=1) - data = data.reset_index() - - data["time"] = pd.to_datetime( - data["time"], format="%Y-%m-%d %H:%M:%S", errors="coerce" - ) - data["time"] = data["time"].dt.normalize() - - # Transform into geopandas dataframe - geo_data = gpd.GeoDataFrame( - data, - geometry=gpd.points_from_xy(data["longitude"], data["latitude"]), - crs="EPSG:4326", - ) - - # Match the polygons using the ones of each predefined country area - geo_masks = get_french_geom() - geo_df = gpd.sjoin(geo_masks, geo_data, how="inner") - super().__init__(geo_df.drop(["index_right", "geometry"], axis=1)) diff --git a/pyro_risks/datasets/__init__.py b/pyro_risks/datasets/__init__.py deleted file mode 100644 index 29ac9cf..0000000 --- a/pyro_risks/datasets/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from .nasa_wildfires import * -from .weather import * -from .wildfires import * -from .masks import * -from .ERA5 import * -from .era_fwi_viirs import * -from . import utils diff --git a/pyro_risks/datasets/datasets_mergers.py b/pyro_risks/datasets/datasets_mergers.py deleted file mode 100644 index 3401948..0000000 --- a/pyro_risks/datasets/datasets_mergers.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import pandas as pd - -from .utils import ( - find_closest_weather_station, - find_closest_location, - get_nearest_points, -) - - -def merge_datasets_by_departements( - dataframe1: pd.DataFrame, - time_col1: str, - geometry_col1: str, - dataframe2: pd.DataFrame, - time_col2: str, - geometry_col2: str, - how: str, -) -> pd.DataFrame: - """ - Merge two datasets containing some kind of geometry and date columns. - The merge is done on [time_col1, time_col2] and [geometry_col1, geometry_col2]. - Here the geometry is based on French departements. Therefore the geometry columns - should contain either the code of the departement or its geometry (should be - consistent throughout both datasets). - - Finally the merge is done according to the `how` parameter. Keep in mind that - this parameter must be chosen so that the merged dataframe keeps similar dimensions to the - weather dataframe. This is because if there is an inner join, we will keep only the days - where wildfires were declared. Therefore if the weather dataframe is the left frame, then - `how` must be left; if it is the right frame, `how` must be right. - - Args: - dataframe1: pd.DataFrame - First dataframe, containing a time column and a geometry one. - time_col1: str - Name of the time column of dataframe1 on which the merge will be done. - geometry_col1: str - Name of the geometry column of dataframe1 on which the merge will be done. - dataframe2: pd.DataFrame - Second dataframe, containing a time column and a geometry one.
- time_col2: str - Name of the time column of dataframe2 on which the merge will be done. - geometry_col2: str - Name of the geometry column of dataframe2 on which the merge will be done. - how: - Merge strategy; should correspond to whether the weather dataframe is the - left or the right frame. - - Returns: pd.DataFrame - Merged dataset on French departement. - """ - merged_data = pd.merge( - dataframe1, - dataframe2, - left_on=[time_col1, geometry_col1], - right_on=[time_col2, geometry_col2], - how=how, - ) - return merged_data - - -def merge_datasets_by_closest_weather_station( - df_weather: pd.DataFrame, - time_col_weather: str, - df_fires: pd.DataFrame, - time_col_fires: str, -) -> pd.DataFrame: - """ - Merge two datasets: one of weather conditions and the other of wildfires history data. - Each dataset must contain a time column, and the weather dataset must have a `STATION` - column which uniquely identifies each station. The merge is done by finding the - closest weather station to each (lat, lon) point of the wildfires history dataset. The - latter is then grouped by date and closest_weather_station, which then allows joining it - with the weather conditions dataframe. - - Args: - df_weather: pd.DataFrame - Weather conditions dataframe. Must have a `STATION` column to identify each - weather station. - time_col_weather: str - Name of the time column in `df_weather`. - df_fires: pd.DataFrame - Wildfires history dataset, must have points described by their latitude and - longitude. - time_col_fires: str - Name of the time column in `df_fires`. - - Returns: pd.DataFrame - Merged dataset by weather station proximity. - """ - # For wildfires dataframe, need to find for each point the closest weather station - df_fires["closest_weather_station"] = df_fires.apply( - lambda row: find_closest_weather_station( - df_weather, row["latitude"], row["longitude"] - ), - axis=1, - ) - - grouped_fires = ( - df_fires.groupby(["closest_weather_station", "acq_date"], observed=True) - .first() - .reset_index() - ) - - merged_data = pd.merge( - df_weather, - grouped_fires, - left_on=[time_col_weather, "STATION"], - right_on=[time_col_fires, "closest_weather_station"], - how="left", - ) - return merged_data - - -def merge_datasets_by_closest_weather_point( - df_weather: pd.DataFrame, - time_col_weather: str, - df_fires: pd.DataFrame, - time_col_fires: str, -) -> pd.DataFrame: - """ - Merge weather and fire datasets when the weather dataset is provided using satellite - data such as ERA5 Land hourly dataset provided here - https://cds.climate.copernicus.eu/cdsapp#!/dataset/reanalysis-era5-land?tab=form - and accessible through cdsapi. - - Args: - df_weather: pd.DataFrame - Weather conditions dataframe, must have "latitude" and "longitude" columns. - time_col_weather: str - Name of the time column in `df_weather`. - df_fires: pd.DataFrame - Wildfires history dataset, must have points described by their latitude and - longitude. - time_col_fires: str - Name of the time column in `df_fires`. - - Returns: pd.DataFrame - Merged dataset by weather point proximity.
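# A hedged sketch of the departement-level merge defined above (the column and
# feature names here are illustrative, not taken from a real extract; assumes the
# deleted module is still importable):
import pandas as pd
from pyro_risks.datasets.datasets_mergers import merge_datasets_by_departements

weather_df = pd.DataFrame({"day": ["2020-01-01"], "departement": ["Aude"], "t2m_mean": [280.1]})
fires_df = pd.DataFrame({"day": ["2020-01-01"], "departement": ["Aude"], "fires": [1]})

# weather is the left frame, so how="left" keeps its dimensions, as the docstring advises
merged = merge_datasets_by_departements(
    weather_df, "day", "departement", fires_df, "day", "departement", how="left"
)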
- """ - # For wildfires dataframe, need to find for each point the closest weather station - df_fires["closest_weather_point"] = df_fires.apply( - lambda row: find_closest_location( - df_weather, row["latitude"], row["longitude"] - ), - axis=1, - ) - - grouped_fires = ( - df_fires.groupby(["closest_weather_point", "acq_date"], observed=True) - .first() - .reset_index() - ) - - grouped_fires["weather_lat"], grouped_fires["weather_lon"] = ( - grouped_fires["closest_weather_point"].str[0], - grouped_fires["closest_weather_point"].str[1], - ) - - merged_data = pd.merge( - df_weather, - grouped_fires, - left_on=[time_col_weather, "latitude", "longitude"], - right_on=[time_col_fires, "weather_lat", "weather_lon"], - how="left", - ) - return merged_data - - -def merge_by_proximity( - df_left: pd.DataFrame, - time_col_left: str, - df_right: pd.DataFrame, - time_col_right: str, - how: str, -) -> pd.DataFrame: - """ - Merge df_left and df_right by finding in among all points in df_left, the closest point in df_right. - For instance, df_left can be a history wildfires dataset and df_right a weather conditions datasets and - we want to match each wildfire with its closest weather point. - This can also be used if, for instance, we want to merge FWI dataset (df_left) with ERA5/VIIRS datatset - (df_right). - - Args: - df_left: pd.DataFrame - Left dataframe, must have "latitude" and "longitude" columns. - time_col_left: str - Name of the time column in `df_left`. - df_right: pd.DataFrame - Right dataset, must have points described by their latitude and - longitude. - time_col_right: str - Name of the time column in `df_right`. - how: str - How the pandas merge needs to be done. - - Returns: - Merged dataset by point (lat/lon) proximity. - """ - # get all df_right points in adequate format - df_tmp = df_right.drop_duplicates(subset=["latitude", "longitude"]) - df_tmp = df_tmp.reset_index(drop=True) - lat_right = df_tmp["latitude"].values - lon_right = df_tmp["longitude"].values - candidates = list(zip(lat_right, lon_right)) - - df_tmp2 = df_left.drop_duplicates(subset=["latitude", "longitude"]) - source_points = list(zip(df_tmp2["latitude"].values, df_tmp2["longitude"].values)) - - indices, _ = get_nearest_points(source_points, candidates) - - dict_idx_lat_lon = {} - for idx in set(indices): - df_tmp3 = df_tmp[df_tmp.index == idx] - dict_idx_lat_lon[idx] = ( - df_tmp3["latitude"].values[0], - df_tmp3["longitude"].values[0], - ) - - dict_source_idx = dict(zip(source_points, indices)) - - df_left["point"] = list(zip(df_left["latitude"], df_left["longitude"])) - - df_left["corresponding_index"] = df_left["point"].map(dict_source_idx) - - df_left["closest_point"] = df_left["corresponding_index"].map(dict_idx_lat_lon) - - df_left["closest_lat"], df_left["closest_lon"] = ( - df_left["closest_point"].str[0], - df_left["closest_point"].str[1], - ) - - merged_data = pd.merge( - df_left, - df_right, - left_on=[time_col_left, "closest_lat", "closest_lon"], - right_on=[time_col_right, "latitude", "longitude"], - how=how, - ) - - merged_data = merged_data.drop( - ["point", "closest_point", "corresponding_index"], axis=1 - ) - return merged_data diff --git a/pyro_risks/datasets/era_fwi_viirs.py b/pyro_risks/datasets/era_fwi_viirs.py deleted file mode 100644 index b866801..0000000 --- a/pyro_risks/datasets/era_fwi_viirs.py +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. 
- -import logging -import pandas as pd -from typing import Optional - -from pyro_risks.datasets import NASAFIRMS_VIIRS, ERA5Land, ERA5T -from pyro_risks.datasets.utils import get_intersection_range -from pyro_risks.datasets.fwi import GwisFwi -from pyro_risks import config as cfg - -__all__ = ["MergedEraFwiViirs"] - - -logger = logging.getLogger("uvicorn.info") - - -def process_dataset_to_predict(fwi: pd.DataFrame, era: pd.DataFrame) -> pd.DataFrame: - """Groupby and merge fwi and era5 datasets for model predictions. - - Args: - fwi (pd.DataFrame): Fwi dataset - era (pd.DataFrame): Era5 dataset - - Returns: - pd.DataFrame: one line per department and day - """ - weather = era.copy() - weather["time"] = pd.to_datetime( - weather["time"], format="%Y-%m-%d", errors="coerce" - ) - fwi_df = fwi.copy() - fwi_df["day"] = pd.to_datetime(fwi_df["day"], format="%Y-%m-%d", errors="coerce") - - # Group fwi dataframe by day and department and compute min, max, mean, std - agg_fwi_df = ( - fwi_df.groupby(["day", "nom"])[cfg.FWI_VARS] - .agg(["min", "max", "mean", "std"]) - .reset_index() - ) - agg_fwi_df.columns = ["day", "nom"] + [ - x[0] + "_" + x[1] for x in agg_fwi_df.columns if x[1] != "" - ] - - logger.info("Finished aggregation of FWI") - - # Group weather dataframe by day and department and compute min, max, mean, std - agg_wth_df = ( - weather.groupby(["time", "nom"])[cfg.WEATHER_ERA5T_VARS] - .agg(["min", "max", "mean", "std"]) - .reset_index() - ) - agg_wth_df.columns = ["day", "nom"] + [ - x[0] + "_" + x[1] for x in agg_wth_df.columns if x[1] != "" - ] - - logger.info("Finished aggregation of weather data") - - # Merge fwi and weather together - res_df = pd.merge(agg_fwi_df, agg_wth_df, on=["day", "nom"], how="inner") - logger.info("Finished merging") - return res_df - - -class MergedEraFwiViirs(pd.DataFrame): - """Create dataframe for modeling described in models/score_v0.py. - - Get weather, NASA FIRMS VIIRS fires and fwi datasets, filter the lines corresponding - to vegetation fires, exclude low-confidence ones, then merge aggregated versions of the - dataframes by department and by day. - For each of the features of the weather and fwi datasets, min, max, mean and std are computed. - Fires are counted for each department and day. - - Returns: - pd.DataFrame - """ - - def __init__( - self, - era_source_path: Optional[str] = None, - viirs_source_path: Optional[str] = None, - fwi_source_path: Optional[str] = None, - ) -> None: - """Define the merged era-fwi-viirs dataframe. - - Args: - era_source_path (str, optional): Era5 data source path. Defaults to None. - viirs_source_path (str, optional): Viirs data source path. Defaults to None. - fwi_source_path (str, optional): Fwi data source path. Defaults to None.
- """ - weather = ERA5T(era_source_path) # ERA5Land(era_source_path) - nasa_firms = NASAFIRMS_VIIRS(viirs_source_path) - - # Time span selection - date_range = get_intersection_range(weather.time, nasa_firms.acq_date) - weather = weather[weather.time.isin(date_range)] - nasa_firms = nasa_firms[nasa_firms.acq_date.isin(date_range)] - - # Keep only vegetation wildfires and remove thermal anomalies with low confidence - where = (nasa_firms["confidence"] != "l") & (nasa_firms["type"] == 0) - nasa_firms = nasa_firms[where] - - # Get FWI dataset for year 2019 (1st september missing) - if fwi_source_path is None: - days = [ - x.strftime("%Y%m%d") - for x in pd.date_range(start="2019-01-01", end="2019-12-31") - ] - days.remove("20190901") - fwi_df = GwisFwi(days_list=days) - else: - fwi_df = pd.read_csv(fwi_source_path) - - # Load FWI dataset - fwi_df["day"] = pd.to_datetime(fwi_df["day"], format="%Y%m%d", errors="coerce") - - # Group fwi dataframe by day and department and compute min, max, mean, std - agg_fwi_df = ( - fwi_df.groupby(["day", "departement"])[cfg.FWI_VARS] - .agg(["min", "max", "mean", "std"]) - .reset_index() - ) - agg_fwi_df.columns = ["day", "departement"] + [ - x[0] + "_" + x[1] for x in agg_fwi_df.columns if x[1] != "" - ] - - # Group weather dataframe by day and department and compute min, max, mean, std - agg_wth_df = ( - weather.groupby(["time", "nom"])[cfg.WEATHER_ERA5T_VARS] - .agg(["min", "max", "mean", "std"]) - .reset_index() - ) - agg_wth_df.columns = ["day", "departement"] + [ - x[0] + "_" + x[1] for x in agg_wth_df.columns if x[1] != "" - ] - - # Merge fwi and weather together - mid_df = pd.merge( - agg_fwi_df, agg_wth_df, on=["day", "departement"], how="inner" - ) - - # Count fires by day and department - fires_count = ( - nasa_firms.groupby(["acq_date", "nom"])["confidence"] - .count() - .to_frame() - .reset_index() - ) - fires_count = fires_count.rename({"confidence": "fires"}, axis=1) - - # Merge fires - final_df = pd.merge( - mid_df, - fires_count, - left_on=["day", "departement"], - right_on=["acq_date", "nom"], - how="left", - ).drop(["acq_date", "nom"], axis=1) - - # Fill lines with no fires with 0 - final_df["fires"] = final_df["fires"].fillna(0) - super().__init__(final_df) diff --git a/pyro_risks/datasets/fwi.py b/pyro_risks/datasets/fwi.py deleted file mode 100644 index 7a9cbc1..0000000 --- a/pyro_risks/datasets/fwi.py +++ /dev/null @@ -1,258 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import pandas as pd -import numpy as np -from netCDF4 import Dataset -import geopandas as gpd -from typing import Optional, List, Dict, Any - -import requests -import zipfile -import os -import urllib.request -import json -import logging -import tempfile - -from shapely.geometry import Point -from shapely import geometry - -from pyro_risks import config as cfg -from pyro_risks.datasets.queries_api import call_fwi -from pyro_risks.datasets.masks import get_french_geom - - -def load_data(output_path: str) -> None: - """Load FWI zipped data from github repo and unzip data in folder output_path. 
- - Args: - output_path (str): absolute, relative or temporary path - """ - results = requests.get(cfg.FR_FWI_2019_FALLBACK) - - os.makedirs(output_path, exist_ok=True) - with open(os.path.join(output_path, "fwi_folder.zip"), "wb") as f: - f.write(results.content) - - file = zipfile.ZipFile(os.path.join(output_path, "fwi_folder.zip")) - file.extractall(path=os.path.join(output_path, "fwi_unzipped")) - - -def include_department(row: pd.Series, polygons_json: Dict[str, Any]) -> str: - """Given a row of a dataframe containing longitude and latitude, returns the name of the French department. - - This function makes use of shapely to check whether a polygon contains a point. - Args: - row (pd.Series): row of dataframe - polygons_json (dict): dict with polygons of the departments - - Returns: - str: name of department or empty string - """ - for i_dep in range(len(polygons_json["features"])): - geom = geometry.shape(polygons_json["features"][i_dep]["geometry"]) - if geom.contains(Point((row["longitude"], row["latitude"]))): - return polygons_json["features"][i_dep]["properties"]["nom"] - return "" - - -def get_fwi_from_api(date: str) -> gpd.GeoDataFrame: - """Call the CDS API and return all fwi variables as a dataframe with geo coordinates and departments. - - When calling the API we get a zip file that must be extracted (in a tmp directory); - each queried variable then comes in a separate netcdf file. A dataframe is created with all the variables, - and finally codes and departments are joined with geopandas. - - Args: - date (str) - - Returns: - pd.DataFrame - """ - - year, month, day = date.split("-") - date_concat = date.replace("-", "") - with tempfile.TemporaryDirectory() as tmp: - call_fwi(tmp, year, month, day) - - file = zipfile.ZipFile(os.path.join(tmp, f"fwi_{year}_{month}_{day}.zip")) - file.extractall(path=os.path.join(tmp, f"fwi_{year}_{month}_{day}")) - - df0 = pd.DataFrame({}) - for var_name in ["BUI", "DC", "DMC", "DSR", "FFMC", "FWI", "ISI"]: - var_path = os.path.join( - tmp, - f"fwi_{year}_{month}_{day}/ECMWF_FWI_{var_name}_{date_concat}_1200_hr_v3.1_int.nc", - ) - nc = Dataset(var_path, "r") - lats = nc.variables["latitude"][:] - var = nc.variables[var_name.lower()][:] - nc.close() - - lons = np.arange(-180, 180.25, 0.25) - var_cyclic = np.ma.hstack([var[0][:, 720:], var[0][:, :721]]) - lon2d, lat2d = np.meshgrid(lons, lats) - df = pd.DataFrame( - { - "latitude": lat2d.flatten(), - "longitude": lon2d.flatten(), - var_name.lower(): var_cyclic.flatten(), - } - ) - df = df.dropna(subset=[var_name.lower()]) - df = df.reset_index(drop=True) - if var_name == "BUI": - df0 = pd.concat([df0, df], axis=1) - else: - df0 = pd.merge(df0, df, on=["latitude", "longitude"], how="inner") - geo_data = gpd.GeoDataFrame( - df0, - geometry=gpd.points_from_xy(df0["longitude"], df0["latitude"]), - crs="EPSG:4326", - ) - geo_masks = get_french_geom() - geo_df = gpd.sjoin(geo_masks, geo_data, how="inner") - return geo_df.drop(["index_right", "geometry"], axis=1) - - -def get_fwi_data_for_predict(date: str) -> pd.DataFrame: - """Run CDS API queries for dates required by the model and return fwi dataset for predict step. - - This principally takes care of the lags required for the modelling step.
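# A note on the recentring in get_fwi_from_api above: the GWIS grids appear to be
# stored on a [0, 360) longitude axis (an inference from the hstack slicing), and
# np.ma.hstack([var[0][:, 720:], var[0][:, :721]]) rebuilds a [-180, 180] axis at
# 0.25 degree resolution. The same shift on a toy 1-D axis:
import numpy as np

grid = np.arange(0, 360, 0.25)                           # 1440 columns on [0, 360)
recentred = np.hstack([grid[720:] - 360.0, grid[:721]])  # same slicing as the code above
assert np.allclose(recentred, np.arange(-180, 180.25, 0.25))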
- - Args: - date (str) - - Returns: - pd.DataFrame - """ - data = get_fwi_from_api(date) - data["day"] = date - # Lag J-1 - lag = np.datetime64(date) - np.timedelta64(1, "D") - dataJ1 = get_fwi_from_api(str(lag)) - dataJ1["day"] = str(lag) - # Lag J-3 - lag = np.datetime64(date) - np.timedelta64(3, "D") - dataJ3 = get_fwi_from_api(str(lag)) - dataJ3["day"] = str(lag) - # Lag J-7 - lag = np.datetime64(date) - np.timedelta64(7, "D") - dataJ7 = get_fwi_from_api(str(lag)) - dataJ7["day"] = str(lag) - merged_data = pd.concat([data, dataJ1, dataJ3, dataJ7], ignore_index=True) - return merged_data - - -def get_fwi_data(source_path: str, day: Optional[str] = "20190101") -> pd.DataFrame: - """Load and handle netcdf data for selected day. - - Return pandas dataframe with longitude, latitude, day and fwi indices - (fwi, ffmc, dmc, dc, isi, bui, dsr, dr). - Args: - source_path (str): path with unzipped netcdf fwi data, usually got from load_data. - day (str, optional): which day to load. Defaults to '20190101'. - - Returns: - pd.DataFrame: dataframe with all fwi indices for selected day - """ - nc = Dataset( - os.path.join(source_path, "fwi_unzipped/JRC_FWI_{}.nc".format(day)), "r" - ) - try: - lons = nc.variables["lon"][:] - lats = nc.variables["lat"][:] - fwi = nc.variables["fwi"][:] - ffmc = nc.variables["ffmc"][:] - dmc = nc.variables["dmc"][:] - dc = nc.variables["dc"][:] - isi = nc.variables["isi"][:] - bui = nc.variables["bui"][:] - dsr = nc.variables["dsr"][:] - dr = nc.variables["danger_risk"][:] - except KeyError: - print("Some reading error with: ", day) - nc.close() - - lon2d, lat2d = np.meshgrid(lons, lats) - - df = pd.DataFrame( - { - "latitude": lat2d.flatten(), - "longitude": lon2d.flatten(), - "day": day, - "fwi": fwi[0, :, :].flatten(), - "ffmc": ffmc[0, :, :].flatten(), - "dmc": dmc[0, :, :].flatten(), - "dc": dc[0, :, :].flatten(), - "isi": isi[0, :, :].flatten(), - "bui": bui[0, :, :].flatten(), - "dsr": dsr[0, :, :].flatten(), - "dr": dr[0, :, :].flatten(), - } - ) - df = df.dropna(subset=["fwi", "ffmc", "dmc", "dc", "isi", "bui", "dsr", "dr"]) - df = df.reset_index(drop=True) - return df - - -def create_departement_df(day_data: pd.DataFrame) -> pd.DataFrame: - """Create dataframe with lon, lat coordinates and corresponding departments. - - Load json with the department polygons and run function include_department to get the - name of departments corresponding to each row of input data, typically one day of FWI data - got with load_data. This may take a few minutes due to the shapely process. - Args: - day_data (pd.Dataframe): df with longitudes and latitudes - - Returns: - pd.DataFrame: dataframe with lat, lon and department - """ - df = day_data.copy() - - with urllib.request.urlopen(cfg.FR_GEOJSON) as url: - dep_polygons = json.loads(url.read().decode()) - - deps = [include_department(df.iloc[i], dep_polygons) for i in range(df.shape[0])] - df["departement"] = deps - df = df[df["departement"] != ""] - dep_geo_df = df[["latitude", "longitude", "departement"]] - return dep_geo_df - - -class GwisFwi(pd.DataFrame): - """GWIS FWI dataframe (8 km resolution) on French territory based on 2019-2020 data.""" - - def __init__(self, days_list: Optional[List[str]] = None) -> None: - """Create dataframe with fwi indices data corresponding to days_list and join french department. 
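# The department lookup above (include_department) is a plain shapely
# point-in-polygon test; the same check on a self-contained toy polygon:
from shapely import geometry
from shapely.geometry import Point

square = geometry.shape(
    {"type": "Polygon", "coordinates": [[[0, 0], [2, 0], [2, 2], [0, 2], [0, 0]]]}
)
print(square.contains(Point(1.0, 1.0)))  # True -> the row would be assigned this polygon's "nom"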
- - Args: - days_list: list of str, format year month day (all concatenated) - """ - days_list = ["20190101"] if days_list is None else days_list - fwi_df = pd.DataFrame( - columns=[ - "latitude", - "longitude", - "day", - "fwi", - "ffmc", - "dmc", - "dc", - "isi", - "bui", - "dsr", - "dr", - ] - ) - with tempfile.TemporaryDirectory() as tmp: - load_data(output_path=tmp) - for day in days_list: - df = get_fwi_data(source_path=tmp, day=day) - fwi_df = pd.concat([fwi_df, df]) - dep_geo_df = create_departement_df(df) - fwi_df = pd.merge(fwi_df, dep_geo_df, on=["latitude", "longitude"]) - super().__init__(fwi_df) diff --git a/pyro_risks/datasets/masks.py b/pyro_risks/datasets/masks.py deleted file mode 100644 index 17036e1..0000000 --- a/pyro_risks/datasets/masks.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import logging -import geopandas as gpd -from typing import Optional - -from pyro_risks import config as cfg - - -__all__ = ["get_french_geom"] - - -def get_french_geom(path: Optional[str] = None) -> gpd.GeoDataFrame: - """Creates the dataframe with the geometry of French departments - - Args: - path: optional path to your local geojson - """ - if isinstance(path, str): - return gpd.read_file(path) - else: - try: - return gpd.read_file(cfg.FR_GEOJSON) - except Exception: - logging.warning(f"Unable to access {cfg.FR_GEOJSON}, trying fallback.") - return gpd.read_file(cfg.FR_GEOJSON_FALLBACK) diff --git a/pyro_risks/datasets/nasa_wildfires.py b/pyro_risks/datasets/nasa_wildfires.py deleted file mode 100644 index a6ba398..0000000 --- a/pyro_risks/datasets/nasa_wildfires.py +++ /dev/null @@ -1,243 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import logging -from typing import List, Optional - -import geopandas as gpd -import pandas as pd - -from pyro_risks import config as cfg - -__all__ = ["NASAFIRMS", "NASAFIRMS_VIIRS"] - -from .masks import get_french_geom - - -class NASAFIRMS(pd.DataFrame): - """Wildfire history dataset on French territory, using data from - NASA satellites. Accessible by completing the form at - https://effis.jrc.ec.europa.eu/applications/data-request-form/ - - Careful when completing the form, you can either choose to get the - dataset in json format or xlsx format. - However if your source data is in a csv format, you can still use - this class to clean it using the parameter `fmt`. - - By default, the format is considered to be json. - - Args: - source_path: str - Path or URL to your version of the source data - fmt: str - Format of the source data, can either be "csv", "xlsx" - or "json". Default is "json". - use_cols: List[str] - List of columns to read from the source - """ - - kept_cols = [ - "latitude", - "longitude", - "acq_date", - "acq_time", - "confidence", - "bright_t31", - "frp", - ] - fmt = "json" - - def __init__( - self, - source_path: Optional[str] = None, - fmt: Optional[str] = None, - use_cols: Optional[List[str]] = None, - ) -> None: - """ - Args: - source_path: Optional[str] - Path or URL to your version of the source data - fmt: Optional[str] - Format of the source data, can either be - "csv", "xlsx" or "json". 
- use_cols: Optional[List[str]] - List of columns to keep in the dataframe - """ - if not isinstance(source_path, str): - # Download in cache - logging.warning( - f"No data source specified for {self.__class__.__name__}, trying fallback." - ) - source_path = cfg.FR_NASA_FIRMS_FALLBACK - if not isinstance(fmt, str): - fmt = self.fmt - if not isinstance(use_cols, list): - use_cols = self.kept_cols - - if fmt == "json": - data = pd.read_json(source_path, orient="records") - data = pd.json_normalize(data["features"]) - # remove unnecessary prefix - data.columns = [col.split(".")[-1] for col in data.columns] - # keep defined columns - data = data[use_cols] - - elif fmt == "xlsx": - data = pd.read_excel(source_path, usecols=use_cols) - - elif fmt == "csv": - data = pd.read_csv(source_path, usecols=use_cols) - # if csv format, the `acq_time` column needs to be changed - # the raw data has the format "HHMM"; we will transform it - # so that it has the format "HHMMSS" - # convert type to str - data["acq_time"] = data["acq_time"].astype(str) - # fill with 0 - data["acq_time"] = data["acq_time"].str.ljust(6, "0") - # prepare for datetime needs - data["acq_time"] = data["acq_time"].apply( - lambda s: ":".join(map("{}{}".format, *(s[::2], s[1::2]))) - ) - - else: - raise ValueError( - "The given format cannot be read, it should be either csv, xlsx or json." - ) - - data["acq_date_time"] = ( - data["acq_date"].astype(str) + " " + data["acq_time"].astype(str) - ) - data["acq_date"] = pd.to_datetime( - data["acq_date"], format="%Y-%m-%d", errors="coerce" - ) - data["acq_date_time"] = pd.to_datetime( - data["acq_date_time"], format="%Y-%m-%d %H:%M:%S", errors="coerce" - ) - data["latitude"] = data["latitude"].astype(float) - data["longitude"] = data["longitude"].astype(float) - data["bright_t31"] = data["bright_t31"].astype(float) - data["frp"] = data["frp"].astype(float) - - # add departements geometry to allow for departements merging - geo_data = gpd.GeoDataFrame( - data, - geometry=gpd.points_from_xy(data["longitude"], data["latitude"]), - crs="EPSG:4326", - ) - # Match the polygons using the ones of each predefined country area - geo_masks = get_french_geom() - geo_df = gpd.sjoin(geo_masks, geo_data, how="inner") - super().__init__(geo_df.drop(["acq_time", "index_right", "geometry"], axis=1)) - - -class NASAFIRMS_VIIRS(pd.DataFrame): - """Wildfire history dataset on French territory, using data from - VIIRS. - - Args: - source_path: str - Path or URL to your version of the source data - fmt: str - Format of the source data, can either be "csv", "xlsx" - or "json". Default is "csv". - use_cols: List[str] - List of columns to read from the source - """ - - kept_cols = [ - "latitude", - "longitude", - "acq_date", - "acq_time", - "confidence", - "bright_ti4", - "bright_ti5", - "frp", - "type", - ] - fmt = "csv" - - def __init__( - self, - source_path: Optional[str] = None, - fmt: Optional[str] = None, - use_cols: Optional[List[str]] = None, - ) -> None: - """ - Args: - source_path: Optional[str] - Path or URL to your version of the source data - fmt: Optional[str] - Format of the source data, can either be - "csv", "xlsx" or "json". - use_cols: Optional[List[str]] - List of columns to keep in the dataframe - """ - if not isinstance(source_path, str): - # Download in cache - logging.warning( - f"No data source specified for {self.__class__.__name__}, trying fallback."
- ) - source_path = cfg.FR_NASA_VIIRS_FALLBACK - if not isinstance(fmt, str): - fmt = self.fmt - if not isinstance(use_cols, list): - use_cols = self.kept_cols - - if fmt == "json": - data = pd.read_json(source_path, orient="records") - data = pd.json_normalize(data["features"]) - # remove unnecessary prefix - data.columns = [col.split(".")[-1] for col in data.columns] - # keep defined columns - data = data[use_cols] - - elif fmt == "xlsx": - data = pd.read_excel(source_path, usecols=use_cols) - - elif fmt == "csv": - data = pd.read_csv(source_path, usecols=use_cols) - # if csv format, the `acq_time` column needs to be changed - # the raw data has the format "HHMM"; we will transform it - # so that it has the format "HHMMSS" - # convert type to str - data["acq_time"] = data["acq_time"].astype(str) - # fill with 0 - data["acq_time"] = data["acq_time"].str.ljust(6, "0") - # prepare for datetime needs - data["acq_time"] = data["acq_time"].apply( - lambda s: ":".join(map("{}{}".format, *(s[::2], s[1::2]))) - ) - - else: - raise ValueError( - "The given format cannot be read, it should be either csv, xlsx or json." - ) - - data["acq_date_time"] = ( - data["acq_date"].astype(str) + " " + data["acq_time"].astype(str) - ) - data["acq_date"] = pd.to_datetime( - data["acq_date"], format="%Y-%m-%d", errors="coerce" - ) - data["acq_date_time"] = pd.to_datetime( - data["acq_date_time"], format="%Y-%m-%d %H:%M:%S", errors="coerce" - ) - data["latitude"] = data["latitude"].astype(float) - data["longitude"] = data["longitude"].astype(float) - data["bright_ti4"] = data["bright_ti4"].astype(float) - data["bright_ti5"] = data["bright_ti5"].astype(float) - data["frp"] = data["frp"].astype(float) - - # add departements geometry to allow for departements merging - geo_data = gpd.GeoDataFrame( - data, - geometry=gpd.points_from_xy(data["longitude"], data["latitude"]), - crs="EPSG:4326", - ) - # Match the polygons using the ones of each predefined country area - geo_masks = get_french_geom() - geo_df = gpd.sjoin(geo_masks, geo_data, how="inner") - super().__init__(geo_df.drop(["acq_time", "index_right", "geometry"], axis=1)) diff --git a/pyro_risks/datasets/queries_api.py b/pyro_risks/datasets/queries_api.py deleted file mode 100644 index 79e43eb..0000000 --- a/pyro_risks/datasets/queries_api.py +++ /dev/null @@ -1,464 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import cdsapi -import os -import logging -import urllib3 - -from pyro_risks import config as cfg - - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -logger = logging.getLogger("uvicorn.info") - - -def call_era5land(output_path: str, year: str, month: str, day: str) -> None: - """Call cdsapi to get ERA5Land data as a netCDF file for the given date. - - By default "time" = "14:00". It is not an issue since we get these ERA5 Land data - with a 2 months delay.
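# The acq_time normalization in the NASAFIRMS classes above, traced step by step
# on a toy value (same expressions as the deleted code):
s = "1423"                                            # raw FIRMS acq_time, "HHMM"
s = s.ljust(6, "0")                                   # -> "142300" ("HHMMSS")
s = ":".join(map("{}{}".format, *(s[::2], s[1::2])))  # -> "14:23:00"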
- - Args: - output_path: str - year: str - month: str - day: str - """ - file_path = os.path.join(output_path, f"era5land_{year}_{month}_{day}.nc") - - if os.path.exists(file_path): - logger.info(f"Using cached {file_path}") - return - - c = cdsapi.Client(url=cfg.CDS_URL, key=f"{cfg.CDS_UID}:{cfg.CDS_API_KEY}", verify=0) - - c.retrieve( - "reanalysis-era5-land", - { - "variable": [ - "10m_u_component_of_wind", - "10m_v_component_of_wind", - "2m_dewpoint_temperature", - "2m_temperature", - "evaporation_from_bare_soil", - "evaporation_from_open_water_surfaces_excluding_oceans", - "evaporation_from_the_top_of_canopy", - "evaporation_from_vegetation_transpiration", - "forecast_albedo", - "lake_bottom_temperature", - "lake_ice_depth", - "lake_ice_temperature", - "lake_mix_layer_depth", - "lake_mix_layer_temperature", - "lake_shape_factor", - "lake_total_layer_temperature", - "leaf_area_index_high_vegetation", - "leaf_area_index_low_vegetation", - "potential_evaporation", - "runoff", - "skin_reservoir_content", - "skin_temperature", - "snow_albedo", - "snow_cover", - "snow_density", - "snow_depth", - "snow_depth_water_equivalent", - "snow_evaporation", - "snowfall", - "snowmelt", - "soil_temperature_level_1", - "soil_temperature_level_2", - "soil_temperature_level_3", - "soil_temperature_level_4", - "sub_surface_runoff", - "surface_latent_heat_flux", - "surface_net_solar_radiation", - "surface_net_thermal_radiation", - "surface_pressure", - "surface_runoff", - "surface_sensible_heat_flux", - "surface_solar_radiation_downwards", - "surface_thermal_radiation_downwards", - "temperature_of_snow_layer", - "total_evaporation", - "total_precipitation", - "volumetric_soil_water_layer_1", - "volumetric_soil_water_layer_2", - "volumetric_soil_water_layer_3", - "volumetric_soil_water_layer_4", - ], - "year": year, - "month": month, - "day": day, - "time": "14:00", - "area": [ - 51, - -6, - 41, - 10, - ], - "format": "netcdf", - }, - file_path, - ) - - -def call_era5t(output_path: str, year: str, month: str, day: str) -> None: - """Call cdsapi to get ERA5T data as a netCDF file for the given date. - - Most recent available data is Day -5. - By default "time" = "14:00". It is not an issue since we get these ERA5T data - with a 5 days delay.
- - Args: - output_path: str - year: str - month: str - day: str - """ - file_path = os.path.join(output_path, f"era5t_{year}_{month}_{day}.nc") - - if os.path.exists(file_path): - logger.info(f"Using cached {file_path}") - return - - c = cdsapi.Client(url=cfg.CDS_URL, key=f"{cfg.CDS_UID}:{cfg.CDS_API_KEY}", verify=0) - - c.retrieve( - "reanalysis-era5-single-levels", - { - "product_type": "reanalysis", - "variable": [ - "100m_u_component_of_wind", - "100m_v_component_of_wind", - "10m_u_component_of_neutral_wind", - "10m_u_component_of_wind", - "10m_v_component_of_neutral_wind", - "10m_v_component_of_wind", - "10m_wind_gust_since_previous_post_processing", - "2m_dewpoint_temperature", - "2m_temperature", - "air_density_over_the_oceans", - "altimeter_corrected_wave_height", - "altimeter_range_relative_correction", - "altimeter_wave_height", - "angle_of_sub_gridscale_orography", - "anisotropy_of_sub_gridscale_orography", - "benjamin_feir_index", - "boundary_layer_dissipation", - "boundary_layer_height", - "charnock", - "clear_sky_direct_solar_radiation_at_surface", - "cloud_base_height", - "coefficient_of_drag_with_waves", - "convective_available_potential_energy", - "convective_inhibition", - "convective_precipitation", - "convective_rain_rate", - "convective_snowfall", - "convective_snowfall_rate_water_equivalent", - "downward_uv_radiation_at_the_surface", - "duct_base_height", - "eastward_gravity_wave_surface_stress", - "eastward_turbulent_surface_stress", - "evaporation", - "forecast_albedo", - "forecast_logarithm_of_surface_roughness_for_heat", - "forecast_surface_roughness", - "free_convective_velocity_over_the_oceans", - "friction_velocity", - "gravity_wave_dissipation", - "high_cloud_cover", - "high_vegetation_cover", - "ice_temperature_layer_1", - "ice_temperature_layer_2", - "ice_temperature_layer_3", - "ice_temperature_layer_4", - "instantaneous_10m_wind_gust", - "instantaneous_eastward_turbulent_surface_stress", - "instantaneous_large_scale_surface_precipitation_fraction", - "instantaneous_moisture_flux", - "instantaneous_northward_turbulent_surface_stress", - "instantaneous_surface_sensible_heat_flux", - "k_index", - "lake_bottom_temperature", - "lake_cover", - "lake_depth", - "lake_ice_depth", - "lake_ice_temperature", - "lake_mix_layer_depth", - "lake_mix_layer_temperature", - "lake_shape_factor", - "lake_total_layer_temperature", - "land_sea_mask", - "large_scale_precipitation", - "large_scale_precipitation_fraction", - "large_scale_rain_rate", - "large_scale_snowfall", - "large_scale_snowfall_rate_water_equivalent", - "leaf_area_index_high_vegetation", - "leaf_area_index_low_vegetation", - "low_cloud_cover", - "low_vegetation_cover", - "maximum_2m_temperature_since_previous_post_processing", - "maximum_individual_wave_height", - "maximum_total_precipitation_rate_since_previous_post_processing", - "mean_boundary_layer_dissipation", - "mean_convective_precipitation_rate", - "mean_convective_snowfall_rate", - "mean_direction_of_total_swell", - "mean_direction_of_wind_waves", - "mean_eastward_gravity_wave_surface_stress", - "mean_eastward_turbulent_surface_stress", - "mean_evaporation_rate", - "mean_gravity_wave_dissipation", - "mean_large_scale_precipitation_fraction", - "mean_large_scale_precipitation_rate", - "mean_large_scale_snowfall_rate", - "mean_northward_gravity_wave_surface_stress", - "mean_northward_turbulent_surface_stress", - "mean_period_of_total_swell", - "mean_period_of_wind_waves", - "mean_potential_evaporation_rate", - "mean_runoff_rate", - 
"mean_sea_level_pressure", - "mean_snow_evaporation_rate", - "mean_snowfall_rate", - "mean_snowmelt_rate", - "mean_square_slope_of_waves", - "mean_sub_surface_runoff_rate", - "mean_surface_direct_short_wave_radiation_flux", - "mean_surface_direct_short_wave_radiation_flux_clear_sky", - "mean_surface_downward_long_wave_radiation_flux", - "mean_surface_downward_long_wave_radiation_flux_clear_sky", - "mean_surface_downward_short_wave_radiation_flux", - "mean_surface_downward_short_wave_radiation_flux_clear_sky", - "mean_surface_downward_uv_radiation_flux", - "mean_surface_latent_heat_flux", - "mean_surface_net_long_wave_radiation_flux", - "mean_surface_net_long_wave_radiation_flux_clear_sky", - "mean_surface_net_short_wave_radiation_flux", - "mean_surface_net_short_wave_radiation_flux_clear_sky", - "mean_surface_runoff_rate", - "mean_surface_sensible_heat_flux", - "mean_top_downward_short_wave_radiation_flux", - "mean_top_net_long_wave_radiation_flux", - "mean_top_net_long_wave_radiation_flux_clear_sky", - "mean_top_net_short_wave_radiation_flux", - "mean_top_net_short_wave_radiation_flux_clear_sky", - "mean_total_precipitation_rate", - "mean_vertical_gradient_of_refractivity_inside_trapping_layer", - "mean_vertically_integrated_moisture_divergence", - "mean_wave_direction", - "mean_wave_direction_of_first_swell_partition", - "mean_wave_direction_of_second_swell_partition", - "mean_wave_direction_of_third_swell_partition", - "mean_wave_period", - "mean_wave_period_based_on_first_moment", - "mean_wave_period_based_on_first_moment_for_swell", - "mean_wave_period_based_on_first_moment_for_wind_waves", - "mean_wave_period_based_on_second_moment_for_swell", - "mean_wave_period_based_on_second_moment_for_wind_waves", - "mean_wave_period_of_first_swell_partition", - "mean_wave_period_of_second_swell_partition", - "mean_wave_period_of_third_swell_partition", - "mean_zero_crossing_wave_period", - "medium_cloud_cover", - "minimum_2m_temperature_since_previous_post_processing", - "minimum_total_precipitation_rate_since_previous_post_processing", - "minimum_vertical_gradient_of_refractivity_inside_trapping_layer", - "model_bathymetry", - "near_ir_albedo_for_diffuse_radiation", - "near_ir_albedo_for_direct_radiation", - "normalized_energy_flux_into_ocean", - "normalized_energy_flux_into_waves", - "normalized_stress_into_ocean", - "northward_gravity_wave_surface_stress", - "northward_turbulent_surface_stress", - "ocean_surface_stress_equivalent_10m_neutral_wind_direction", - "ocean_surface_stress_equivalent_10m_neutral_wind_speed", - "orography", - "peak_wave_period", - "period_corresponding_to_maximum_individual_wave_height", - "potential_evaporation", - "precipitation_type", - "runoff", - "sea_ice_cover", - "sea_surface_temperature", - "significant_height_of_combined_wind_waves_and_swell", - "significant_height_of_total_swell", - "significant_height_of_wind_waves", - "significant_wave_height_of_first_swell_partition", - "significant_wave_height_of_second_swell_partition", - "significant_wave_height_of_third_swell_partition", - "skin_reservoir_content", - "skin_temperature", - "slope_of_sub_gridscale_orography", - "snow_albedo", - "snow_density", - "snow_depth", - "snow_evaporation", - "snowfall", - "snowmelt", - "soil_temperature_level_1", - "soil_temperature_level_2", - "soil_temperature_level_3", - "soil_temperature_level_4", - "soil_type", - "standard_deviation_of_filtered_subgrid_orography", - "standard_deviation_of_orography", - "sub_surface_runoff", - "surface_latent_heat_flux", - 
"surface_net_solar_radiation", - "surface_net_solar_radiation_clear_sky", - "surface_net_thermal_radiation", - "surface_net_thermal_radiation_clear_sky", - "surface_pressure", - "surface_runoff", - "surface_sensible_heat_flux", - "surface_solar_radiation_downward_clear_sky", - "surface_solar_radiation_downwards", - "surface_thermal_radiation_downward_clear_sky", - "surface_thermal_radiation_downwards", - "temperature_of_snow_layer", - "toa_incident_solar_radiation", - "top_net_solar_radiation", - "top_net_solar_radiation_clear_sky", - "top_net_thermal_radiation", - "top_net_thermal_radiation_clear_sky", - "total_cloud_cover", - "total_column_cloud_ice_water", - "total_column_cloud_liquid_water", - "total_column_ozone", - "total_column_rain_water", - "total_column_snow_water", - "total_column_supercooled_liquid_water", - "total_column_water", - "total_column_water_vapour", - "total_precipitation", - "total_sky_direct_solar_radiation_at_surface", - "total_totals_index", - "trapping_layer_base_height", - "trapping_layer_top_height", - "type_of_high_vegetation", - "type_of_low_vegetation", - "u_component_stokes_drift", - "uv_visible_albedo_for_diffuse_radiation", - "uv_visible_albedo_for_direct_radiation", - "v_component_stokes_drift", - "vertical_integral_of_divergence_of_cloud_frozen_water_flux", - "vertical_integral_of_divergence_of_cloud_liquid_water_flux", - "vertical_integral_of_divergence_of_geopotential_flux", - "vertical_integral_of_divergence_of_kinetic_energy_flux", - "vertical_integral_of_divergence_of_mass_flux", - "vertical_integral_of_divergence_of_moisture_flux", - "vertical_integral_of_divergence_of_ozone_flux", - "vertical_integral_of_divergence_of_thermal_energy_flux", - "vertical_integral_of_divergence_of_total_energy_flux", - "vertical_integral_of_eastward_cloud_frozen_water_flux", - "vertical_integral_of_eastward_cloud_liquid_water_flux", - "vertical_integral_of_eastward_geopotential_flux", - "vertical_integral_of_eastward_heat_flux", - "vertical_integral_of_eastward_kinetic_energy_flux", - "vertical_integral_of_eastward_mass_flux", - "vertical_integral_of_eastward_ozone_flux", - "vertical_integral_of_eastward_total_energy_flux", - "vertical_integral_of_eastward_water_vapour_flux", - "vertical_integral_of_energy_conversion", - "vertical_integral_of_kinetic_energy", - "vertical_integral_of_mass_of_atmosphere", - "vertical_integral_of_mass_tendency", - "vertical_integral_of_northward_cloud_frozen_water_flux", - "vertical_integral_of_northward_cloud_liquid_water_flux", - "vertical_integral_of_northward_geopotential_flux", - "vertical_integral_of_northward_heat_flux", - "vertical_integral_of_northward_kinetic_energy_flux", - "vertical_integral_of_northward_mass_flux", - "vertical_integral_of_northward_ozone_flux", - "vertical_integral_of_northward_total_energy_flux", - "vertical_integral_of_northward_water_vapour_flux", - "vertical_integral_of_potential_and_internal_energy", - "vertical_integral_of_potential_internal_and_latent_energy", - "vertical_integral_of_temperature", - "vertical_integral_of_thermal_energy", - "vertical_integral_of_total_energy", - "vertically_integrated_moisture_divergence", - "volumetric_soil_water_layer_1", - "volumetric_soil_water_layer_2", - "volumetric_soil_water_layer_3", - "volumetric_soil_water_layer_4", - "wave_spectral_directional_width", - "wave_spectral_directional_width_for_swell", - "wave_spectral_directional_width_for_wind_waves", - "wave_spectral_kurtosis", - "wave_spectral_peakedness", - "wave_spectral_skewness", - "zero_degree_level", 
- ], - "year": year, - "month": month, - "day": day, - "time": "14:00", - "area": [ - 51, - -6, - 41, - 10, - ], - "format": "netcdf", - }, - file_path, - ) - # TODO : take only needed variables for the model - - -def call_fwi(output_path: str, year: str, month: str, day: str) -> None: - """Get data from Fire danger indices historical data from the Copernicus Climate Data Store. - - Information on FWI can be found here: - https://datastore.copernicus-climate.eu/c3s/published-forms/c3sprod/cems-fire-historical/Fire_In_CDS.pdf - - Please follow the instructions before using the CDS API: https://cds.climate.copernicus.eu/api-how-to - Most recent available data is Day -2 - - Args: - output_path: str - year: str - month: str - day: str - """ - - file_path = os.path.join(output_path, f"fwi_{year}_{month}_{day}.zip") - - if os.path.exists(file_path): - logger.info(f"Using cached {file_path}") - return - - c = cdsapi.Client(url=cfg.CDS_URL, key=f"{cfg.CDS_UID}:{cfg.CDS_API_KEY}", verify=0) - - c.retrieve( - "cems-fire-historical", - { - "format": "zip", - "dataset": "Intermediate dataset", - "year": year, - "month": month, - "version": "3.1", - "variable": [ - "build_up_index", - "danger_risk", - "drought_code", - "duff_moisture_code", - "fine_fuel_moisture_code", - "fire_daily_severity_rating", - "fire_weather_index", - "initial_fire_spread_index", - ], - "product_type": "reanalysis", - "day": day, - }, - file_path, - ) diff --git a/pyro_risks/datasets/utils.py b/pyro_risks/datasets/utils.py deleted file mode 100644 index c55a4c1..0000000 --- a/pyro_risks/datasets/utils.py +++ /dev/null @@ -1,370 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import requests -import os -import gzip -import tarfile -import shutil -import warnings - -from scipy import spatial -from typing import Tuple, Optional, List, Any - -from io import BytesIO -from datetime import datetime -from urllib.parse import urlparse -from zipfile import ZipFile - -import numpy as np -import pandas as pd - - -def get_intersection_range(ts1: pd.Series, ts2: pd.Series) -> pd.DatetimeIndex: - """Computes the intersecting date range of two series. - - Args: - ts1: time series - ts2: time series - """ - # Time span selection - time_range1 = max(ts1.min(), ts2.min()) - time_range2 = min(ts1.max(), ts2.max()) - if time_range1 > time_range2: - raise ValueError("Extracts do not have intersecting date range") - - return pd.date_range(time_range1, time_range2) - - -def find_closest_weather_station( - df_weather: pd.DataFrame, latitude: pd.DataFrame, longitude: pd.DataFrame -) -> int: - """ - The weather dataframe SHOULD contain a "STATION" column giving the id of - each weather station in the dataset. 
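# A quick check of get_intersection_range above (toy series; assumes the deleted
# helper is importable):
import pandas as pd
from pyro_risks.datasets.utils import get_intersection_range

ts1 = pd.Series(pd.to_datetime(["2020-01-01", "2020-01-10"]))
ts2 = pd.Series(pd.to_datetime(["2020-01-05", "2020-01-20"]))
rng = get_intersection_range(ts1, ts2)  # daily DatetimeIndex from 2020-01-05 to 2020-01-10
assert len(rng) == 6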
- - Args: - df_weather: pd.DataFrame - Dataframe of weather conditions - latitude: float - Latitude of the point to which we want to find the closest - weather station - longitude: float - Longitude of the point to which we want to find the closest - weather station - - Returns: int - Id of the closest weather station of the point (lat, lon) - - """ - if "STATION" not in df_weather.columns: - raise ValueError("STATION column is missing in given weather dataframe.") - - weather = df_weather.drop_duplicates(subset=["STATION", "LATITUDE", "LONGITUDE"]) - - zipped_station_lat_lon = zip( - weather["STATION"].values.tolist(), - weather["LATITUDE"].values.tolist(), - weather["LONGITUDE"].values.tolist(), - ) - list_station_lat_lon = list(zipped_station_lat_lon) - - reference_station = list_station_lat_lon[0][0] - latitude_0 = list_station_lat_lon[0][1] - longitude_0 = list_station_lat_lon[0][2] - - min_distance = np.sqrt( - (latitude - latitude_0) ** 2 + (longitude - longitude_0) ** 2 - ) - - for k in range(1, weather.shape[0]): - current_latitude = list_station_lat_lon[k][1] - current_longitude = list_station_lat_lon[k][2] - current_distance = np.sqrt( - (latitude - current_latitude) ** 2 + (longitude - current_longitude) ** 2 - ) - - if current_distance < min_distance: - min_distance = current_distance - reference_station = list_station_lat_lon[k][0] - - return int(reference_station) - - -def find_closest_location( - df_weather: pd.DataFrame, latitude: float, longitude: float -) -> Tuple[float, float]: - """ - For a given point (`latitude`, `longitude`), get the closest point which exists in `df_weather`. - This function is to be used when the user does not choose to use weather stations data but satellite data, - e.g. ERA5 Land variables. - - Args: - df_weather: pd.DataFrame - Dataframe of land/weather conditions - latitude: float - Latitude of the point to which we want to find the closest point in `df_weather`. - longitude: float - Longitude of the point to which we want to find the closest in `df_weather`. - - Returns: Tuple(float, float) - Tuple of the closest weather point (closest_lat, closest_lon) of the point (lat, lon) - """ - if "STATION" in df_weather.columns: - raise ValueError( - "STATION is in the columns, should use `find_closest_weather_station`." - ) - - weather = df_weather.drop_duplicates(subset=["latitude", "longitude"]) - - zipped_points_lat_lon = zip( - weather["latitude"].values.tolist(), weather["longitude"].values.tolist() - ) - list_station_lat_lon = list(zipped_points_lat_lon) - - latitude_0 = list_station_lat_lon[0][0] - longitude_0 = list_station_lat_lon[0][1] - reference_point = (latitude_0, longitude_0) - - min_distance = np.sqrt( - (latitude - latitude_0) ** 2 + (longitude - longitude_0) ** 2 - ) - - for k in range(1, weather.shape[0]): - current_latitude = list_station_lat_lon[k][0] - current_longitude = list_station_lat_lon[k][1] - current_distance = np.sqrt( - (latitude - current_latitude) ** 2 + (longitude - current_longitude) ** 2 - ) - - if current_distance < min_distance: - min_distance = current_distance - reference_point = (current_latitude, current_longitude) - - return reference_point - - -def url_retrieve(url: str, timeout: Optional[float] = None) -> bytes: - """Retrieves and passes the content of a URL request. - - Args: - url: URL to request - timeout: number of seconds before the request times out. Defaults to None.
- - Raises: - requests.exceptions.ConnectionError: - - Return: - Content of the response - """ - response = requests.get(url, timeout=timeout, allow_redirects=True) - if response.status_code != 200: - raise requests.exceptions.ConnectionError( - f"Error code {response.status_code} - could not download {url}" - ) - return response.content - - -def get_fname(url: str) -> Tuple[str, Optional[str], Optional[str]]: - """Find file name, extension and compression of an archive located by an URL. - - Args: - url: URL of the compressed archive - - Raises: - ValueError: if URL contains more than one extension - ValueError: if URL contains more than one compression format - - Returns: - A tuple containing the base file name, extension and compression format - """ - supported_compressions = ["tar", "gz", "zip"] - supported_extensions = ["csv", "geojson", "shp", "shx", "nc"] - - archive_name = urlparse(url).path.rpartition("/")[-1] - - base = archive_name.split(".")[0] - - list_extensions = list(set(supported_extensions) & set(archive_name.split("."))) - list_compressions = list(set(supported_compressions) & set(archive_name.split("."))) - - if len(list_extensions) == 0: - extension = None - elif len(list_extensions) == 1: - extension = list_extensions[0] - else: - raise ValueError(f"Error {url} contains more than one extension") - - if len(list_compressions) == 0: - compression = None - - elif len(list_compressions) == 1: - compression = list_compressions[0] - - elif len(list_compressions) == 2: - compression = "tar.gz" - - else: - raise ValueError(f"Error {url} contains more than one compression format") - - return (base, extension, compression) - - -def download( - url: str, - default_extension: str, - unzip: Optional[bool] = True, - destination: str = "./tmp", -) -> None: - """Helper function for downloading, unzipping and saving compressed file from a given URL. - - Args: - url: URL of the compressed archive - default_extension: extension of the archive - unzip: whether archive should be unzipped. Defaults to True. - destination: folder where the file should be saved. Defaults to '.'. 
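The deleted get_fname parser splits an archive name on dots and intersects the pieces with its supported extension and compression lists; its expected behavior on a few illustrative URLs:

from pyro_risks.datasets.utils import get_fname

# Expected outputs of the deleted parser (illustrative URLs).
assert get_fname("https://example.org/data/2020.csv.gz") == ("2020", "csv", "gz")
assert get_fname("https://example.org/archive.tar.gz") == ("archive", None, "tar.gz")
assert get_fname("https://example.org/shapes.zip") == ("shapes", None, "zip")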
- """ - base, extension, compression = get_fname(url) - content = url_retrieve(url) - - if unzip and compression == "zip": - os.makedirs(os.path.dirname(destination), exist_ok=True) - with ZipFile(BytesIO(content)) as zip_file: - zip_file.extractall(destination) - - elif unzip and compression == "tar.gz": - os.makedirs(os.path.dirname(destination), exist_ok=True) - with tarfile.open(fileobj=BytesIO(content), mode="r:gz") as tar_file: - tar_file.extractall(path=destination) - - elif unzip and compression == "gz": - file_name = ( - f"{base}.{extension}" - if extension is not None - else f"{base}.{default_extension}" - ) - full_path = os.path.join(destination, file_name) - os.makedirs(os.path.dirname(full_path), exist_ok=True) - with gzip.open(BytesIO(content)) as gzip_file, open( - full_path, "wb+" - ) as unzipped_file: - shutil.copyfileobj(gzip_file, unzipped_file) - - elif not unzip and compression is None: - file_name = ( - f"{base}.{extension}" - if extension is not None - else f"{base}.{default_extension}" - ) - full_path = os.path.join(destination, file_name) - os.makedirs(os.path.dirname(full_path), exist_ok=True) - with open(full_path, "wb+") as file: - file.write(content) - - elif not unzip and isinstance(compression, str): - file_name = f"{base}.{compression}" - full_path = os.path.join(destination, file_name) - os.makedirs(os.path.dirname(full_path), exist_ok=True) - with open(full_path, "wb+") as file: - file.write(content) - - else: - raise ValueError("If the file is not compressed set unzip to False") - - -def get_ghcn( - start_year: Optional[int] = None, - end_year: Optional[int] = None, - destination: str = "./ghcn", -) -> None: - """Download yearly Global Historical Climatology Network - Daily (GHCN-Daily) (.csv) From (NCEI). - - Args: - start_year: first year to be retrieved. Defaults to None. - end_year: first that will not be retrieved. Defaults to None. - destination: destination directory. Defaults to './ghcn'. - """ - start_year = datetime.now().year if start_year is None else start_year - end_year = ( - datetime.now().year + 1 - if end_year is None or start_year == end_year - else end_year - ) - - for year in range(start_year, end_year): - url = f"https://www1.ncdc.noaa.gov/pub/data/ghcn/daily/by_year/{year}.csv.gz" - download(url=url, default_extension="csv", unzip=True, destination=destination) - - -def get_modis( - start_year: Optional[int] = None, - end_year: Optional[int] = None, - yearly: Optional[bool] = False, - destination: str = "./firms", -) -> None: - """Download last 24H or yearly France active fires from the FIRMS NASA. - Args: - start_year: first year to be retrieved. Defaults to None. - end_year: first that will not be retrieved. Defaults to None. - yearly: whether to download yearly active fires or not. Defaults to False. - destination: destination directory. Defaults to './firms'.] 
- """ - if yearly is True: - start_year = datetime.now().year - 1 if start_year is None else start_year - end_year = ( - datetime.now().year - if end_year is None or start_year == end_year - else end_year - ) - - for year in range(start_year, end_year): - assert ( - start_year != 2020 or end_year != 2021 - ), "MODIS active fire archives are only available for the years from 2000 to 2019" - url = f"https://firms.modaps.eosdis.nasa.gov/data/country/modis/{year}/modis_{year}_France.csv" - download( - url=url, default_extension="csv", unzip=False, destination=destination - ) - - else: - if start_year is not None: - raise BaseException( - warnings.warn( - "The active fires from the last 24H of the MODIS Satellite will be download." - ) - ) # type: ignore - else: - url = "https://firms.modaps.eosdis.nasa.gov/data/active_fire/c6/csv/MODIS_C6_Europe_24h.csv" - download( - url=url, default_extension="csv", unzip=False, destination=destination - ) - - -def get_nearest_points( - source_points: List[Tuple[Any, Any]], candidates: List[Tuple[Any, Any]] -) -> Tuple: - """ - Find nearest neighbor for all source points from a set of candidate points - using KDTree algorithm. - - Args: - source_points: List[Tuple] - List of tuples (lat, lon) for which you want to find the closest point in candidates. - candidates: List[Tuple] - List of tuples (lat, lon) which are all possible closest points. - - Returns: Tuple - indices : array of integers - The locations of the neighbors in candidates. - distances : array of floats - The distances to the nearest neighbors.. - """ - # Create tree from the candidate points - tree = spatial.cKDTree(candidates) - - # Find closest points and distances - distances, indices = tree.query(source_points, k=1) - - return indices, distances diff --git a/pyro_risks/datasets/weather.py b/pyro_risks/datasets/weather.py deleted file mode 100644 index d510abd..0000000 --- a/pyro_risks/datasets/weather.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import logging -import pandas as pd -import geopandas as gpd -from typing import List, Optional - -from pyro_risks import config as cfg -from .masks import get_french_geom - -__all__ = ["NOAAWeather"] - - -class NOAAWeather(pd.DataFrame): - """Weather dataset on French territory, accessible upon request to NOAA. Requests are to be made at: - https://www.ncdc.noaa.gov/cdo-web. - - Args: - source_path: path or URL to your version of the source data - use_cols: columns to read from source - """ - - kept_cols = [ - "STATION", - "DATE", - "LATITUDE", - "LONGITUDE", - "ELEVATION", - "DEWP", - "DEWP_ATTRIBUTES", - "FRSHTT", - "GUST", - "MAX", - "MIN", - "MXSPD", - "PRCP", - "SLP", - "SLP_ATTRIBUTES", - "SNDP", - "STP", - "STP_ATTRIBUTES", - "TEMP", - "TEMP_ATTRIBUTES", - "VISIB", - "VISIB_ATTRIBUTES", - "WDSP", - "WDSP_ATTRIBUTES", - ] - - def __init__( - self, source_path: Optional[str] = None, use_cols: Optional[List[str]] = None - ) -> None: - if not isinstance(source_path, str): - # Download in cache - logging.warning( - f"No data source specified for {self.__class__.__name__}, trying fallback." 
- ) - source_path = cfg.FR_WEATHER_FALLBACK - if not isinstance(use_cols, list): - use_cols = self.kept_cols - data = pd.read_csv(source_path, usecols=use_cols) - geo_df = gpd.GeoDataFrame( - data, - geometry=gpd.points_from_xy(data["LONGITUDE"], data["LATITUDE"]), - crs="EPSG:4326", - ) - # Match the polygons using the ones of each predefined country area - geo_masks = get_french_geom() - geo_data = gpd.sjoin(geo_masks, geo_df, how="inner") - # Drop NA - geo_data = geo_data.dropna(axis=1) - # Convert - geo_data["DATE"] = pd.to_datetime( - geo_data["DATE"], format="%Y-%m-%d", errors="coerce" - ) - # Drop Cols - super().__init__(geo_data.drop(["index_right", "geometry"], axis=1)) diff --git a/pyro_risks/datasets/wildfires.py b/pyro_risks/datasets/wildfires.py deleted file mode 100644 index e8c9f57..0000000 --- a/pyro_risks/datasets/wildfires.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import logging -import pandas as pd -from typing import List, Optional - -from pyro_risks import config as cfg - - -__all__ = ["BDIFFHistory"] - - -class BDIFFHistory(pd.DataFrame): - """Wildfire history dataset on French territory, accessible at https://bdiff.agriculture.gouv.fr/. - - Args: - source_path: path or URL to your version of the source data - use_cols: columns to read from source - """ - - kept_cols = ["Date de première alerte", "Département", "Statut"] - - def __init__( - self, source_path: Optional[str] = None, use_cols: Optional[List[str]] = None - ) -> None: - if not isinstance(source_path, str): - # Download in cache - logging.warning( - f"No data source specified for {self.__class__.__name__}, trying fallback." - ) - source_path = cfg.FR_FIRES_FALLBACK - if not isinstance(use_cols, list): - use_cols = self.kept_cols - data = pd.read_csv(source_path, sep=";", usecols=use_cols) - tmp = pd.to_datetime( - data["Date de première alerte"], format="%Y-%m-%d %H:%M:%S", errors="coerce" - ) - data["date"] = tmp.dt.normalize() # Set time to 00:00:00 for each entry - # Drop Cols - super().__init__(data.drop(["Date de première alerte"], axis=1)) diff --git a/pyro_risks/main.py b/pyro_risks/main.py deleted file mode 100644 index 26bae51..0000000 --- a/pyro_risks/main.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. 
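Both deleted wrappers subclass pd.DataFrame, so they drop straight into ordinary pandas workflows. A short sketch, assuming the cfg.*_FALLBACK URLs are reachable and geopandas is installed:

from pyro_risks.datasets.weather import NOAAWeather
from pyro_risks.datasets.wildfires import BDIFFHistory

weather = NOAAWeather()   # NOAA station observations, geocoded to French departments
history = BDIFFHistory()  # BDIFF fire alerts with a normalized "date" column

print(weather.shape, history.shape)
print(history.groupby("Département")["Statut"].count().head())

Subclassing pd.DataFrame keeps call sites simple, at the cost of re-downloading the fallback source on every construction when no local source_path is given.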
- -# type: ignore -from pyro_risks.datasets.utils import download -from pyro_risks.pipeline import load_dataset, train_pipeline, evaluate_pipeline -from pyro_risks.pipeline import PyroRisk -from datetime import date - -import pyro_risks.config as cfg -import click - - -@click.group() -def main(): - pass - - -@main.group(name="download") -def download_main(): - pass - - -@download_main.command(name="dataset") -@click.option("--url", default=cfg.ERA5T_VIIRS_PIPELINE, help="Dataset URL") -@click.option( - "--extension", "default_extension", default="csv", help="Dataset file extension" -) -@click.option( - "--unzip", - is_flag=True, - default=False, - help="Wether the dataset file should be unzip or not", -) -@click.option( - "--destination", default=cfg.DATA_REGISTRY, help="Dataset registry local path" -) -def _download_dataset(url: str, default_extension: str, unzip: bool, destination: str): - click.echo(f"Download {cfg.DATASET} dataset in {destination}") - download( - url=url, - default_extension=default_extension, - unzip=unzip, - destination=destination, - ) - - -@download_main.command(name="inputs") -@click.option("--day", help="Date of interest (%Y-%m-%d) for example 2020-05-05") -@click.option("--country", default="France", help="Country of interest") -@click.option( - "--directory", default=cfg.PREDICTIONS_REGISTRY, help="Dataset registry local path" -) -def _download_inputs(day: str, country: str, directory: str): - day = day if day is not None else date.today().strftime("%Y-%m-%d") - pyrorisk = PyroRisk() - location = "default directory" if directory is None else directory - click.echo(f"Download inputs in {location} to fire risks in {country} on {day}") - pyrorisk.get_inputs(day=day, country=country, dir_destination=directory) - click.echo("The fire risks inputs are downloaded") - - -@main.command(name="train") -@click.option("--model", help="Classification Pipeline name RF, XGBOOST") -@click.option( - "--destination", - default=cfg.MODEL_REGISTRY, - help="Destination folder for persisting pipeline.", -) -@click.option( - "--ignore_prints/--print", is_flag=True, help="Whether to print results or not." -) -@click.option( - "--ignore_html/--html", is_flag=True, help="Persist pipeline html description." -) -def _train_pipeline( - model: str, destination: str, ignore_prints: bool, ignore_html: bool -) -> None: - click.echo(f"Train and save pipeline in {destination}") - X, y = load_dataset() - train_pipeline( - X=X, - y=y, - model=model, - destination=destination, - ignore_prints=ignore_prints, - ignore_html=ignore_html, - ) - - -@main.command(name="evaluate") -@click.option("--pipeline", help="Pipeline location path.") -@click.option("--threshold", help="Classification pipeline optimal threshold path.") -@click.option("--prefix", help="Classification reports prefix i.e. 
pipeline name.") -@click.option( - "--destination", - default=cfg.METADATA_REGISTRY, - help="Folder where the report should be saved.", -) -def _evaluate_pipeline( - pipeline: str, threshold: str, prefix: str, destination: str -) -> None: - click.echo(f"Evaluate and save pipeline performance metrics in {destination}") - X, y = load_dataset() - evaluate_pipeline( - X=X, - y=y, - pipeline=pipeline, - threshold=threshold, - prefix=prefix, - destination=destination, - ) - - -@main.command(name="predict") -@click.option( - "--model", - default="RF", - help="trained pipeline from pyrorisks remote model default to RF", -) -@click.option("--day", help="Date of interest (%Y-%m-%d) for example 2020-05-05") -@click.option("--country", default="France", help="Country of interest") -@click.option("--zone", default=cfg.ZONE_VAR, help="Territorial unit variable") -@click.option( - "--directory", - default=cfg.PREDICTIONS_REGISTRY, - help="Predictions registry local path", -) -def _predict(model: str, day: str, country: str, zone: str, directory: str): - day = day if day is not None else date.today().strftime("%Y-%m-%d") - pyrorisk = PyroRisk(model=model) - click.echo(f"Start predictions with the trained {pyrorisk.model} pipeline") - pyrorisk.predict( - day=day, country=country, zone_column=zone, dir_destination=directory - ) - click.echo( - f"Predictions are persisted in {directory}{pyrorisk.model}_prediction_{country}_{day}.joblib" - ) - - -if __name__ == "__main__": - main() diff --git a/pyro_risks/models/__init__.py b/pyro_risks/models/__init__.py deleted file mode 100644 index c790e5f..0000000 --- a/pyro_risks/models/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .pipelines import * -from .transformers import * -from .utils import * diff --git a/pyro_risks/models/pipelines.py b/pyro_risks/models/pipelines.py deleted file mode 100644 index 1a2267b..0000000 --- a/pyro_risks/models/pipelines.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from imblearn.pipeline import Pipeline -from .transformers import ( - TargetDiscretizer, - CategorySelector, - Imputer, - LagTransformer, - FeatureSubsetter, -) -from .utils import discretizer - -from sklearn.ensemble import RandomForestClassifier - -from xgboost import XGBClassifier - -import pyro_risks.config as cfg - -__all__ = ["rf_pipeline", "xgb_pipeline"] - -# pipeline base steps definition -base_steps = [ - ( - "filter_dep", - CategorySelector(variable=cfg.ZONE_VAR, category=cfg.SELECTED_DEP), - ), - ( - "add_lags", - LagTransformer( - date_column=cfg.DATE_VAR, - zone_column=cfg.ZONE_VAR, - columns=cfg.LAG_ERA5T_VARS, - ), - ), - ("imputer", Imputer(columns=cfg.MODEL_ERA5T_VARS, strategy="median")), - ("binarize_target", TargetDiscretizer(discretizer=discretizer)), - ("subset_features", FeatureSubsetter(columns=cfg.MODEL_ERA5T_VARS)), -] - -# Add estimator to base step lists -xgb_steps = [*base_steps, ("xgboost", XGBClassifier(**cfg.XGB_PARAMS))] -rf_steps = [*base_steps, ("random_forest", RandomForestClassifier(**cfg.RF_PARAMS))] - -# Define sklearn / imblearn pipelines -xgb_pipeline = Pipeline(xgb_steps) -rf_pipeline = Pipeline(rf_steps) diff --git a/pyro_risks/models/transformers.py b/pyro_risks/models/transformers.py deleted file mode 100644 index 395d6df..0000000 --- a/pyro_risks/models/transformers.py +++ /dev/null @@ -1,349 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. 
- -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from typing import List, Union, Optional, Tuple, Callable -from sklearn.base import BaseEstimator, TransformerMixin -from sklearn.impute import SimpleImputer -from .utils import check_xy, check_x - -import pandas as pd -import numpy as np - - -class TargetDiscretizer(BaseEstimator): - """Discretize numerical target variable. - - The `TargetDiscretizer` transformer maps target variable values to discrete values using - a user defined function. - - Parameters: - discretizer: user defined function. - """ - - def __init__(self, discretizer: Callable) -> None: - if callable(discretizer): - self.discretizer = discretizer - else: - raise TypeError(f"{self.__class__.__name__} constructor expect a callable") - - def fit_resample( - self, X: pd.DataFrame, y: pd.Series - ) -> Tuple[pd.DataFrame, pd.Series]: - """Discretize the target variable. - - The `fit_resample` method allows for discretizing the target variable. - The method does not resample the dataset, the naming convention ensure - the compatibility of the transformer with imbalanced-learn `Pipeline` - object. - - Args: - X: Training dataset features - y: Training dataset target - - Returns: - Training dataset features and target tuple. - """ - X, y = check_xy(X, y) - - y = y.apply(self.discretizer) - - return X, y - - -class CategorySelector(BaseEstimator): - """Select features and targets rows. - - The `CategorySelector` transformer select features and targets rows - belonging to given variable categories. - - Parameters: - variable: variable to be used for selection. - category: modalities to be selected. - """ - - def __init__(self, variable: str, category: Union[str, list]) -> None: - self.variable = variable - # Catch or prevent key errors - if isinstance(category, str): - self.category = [category] - elif isinstance(category, list): - self.category = category - else: - raise TypeError( - f"{self.__class__.__name__} constructor category argument expect a string or a list" - ) - - def fit_resample( - self, X: pd.DataFrame, y: Optional[pd.Series] = None - ) -> Tuple[pd.DataFrame, pd.Series]: - """Select features and targets rows. - - The `fit_resample` method allows for selecting the features and target - rows. The method does not resample the dataset, the naming convention ensure - the compatibility of the transformer with imbalanced-learn `Pipeline` - object. - - Args: - X: Training dataset features - y: Training dataset target - - Returns: - Training dataset features and target tuple. - """ - if isinstance(X, pd.DataFrame) and isinstance(y, pd.Series): - mask = X[self.variable].isin(self.category) - XR = X[mask].copy() - yr = y[mask].copy() - - else: - raise TypeError( - f"{self.__class__.__name__} fit_resample methods expect pd.DataFrame and\ - pd.Series as inputs." - ) - - return XR, yr - - -class Imputer(SimpleImputer): - """Impute missing values. - - The `Imputer` transformer wraps scikit-learn SimpleImputer transformer. - - Parameters: - missing_values: the placeholder for the missing values. - strategy: the imputation strategy (mean, median, most_frequent, constant). - fill_value: fill_value is used to replace all occurrences of missing_values (default to 0). - verbose: controls the verbosity of the imputer. - copy: If True, a copy of X will be created. - add_indicator: If True, a MissingIndicator transform will stack onto output of the imputer’s transform. 
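A toy run of the deleted Imputer wrapper, showing that only the listed columns are imputed while the rest of the frame passes through untouched:

import numpy as np
import pandas as pd
from pyro_risks.models.transformers import Imputer

X = pd.DataFrame({"t2m": [280.0, np.nan, 284.0], "dep": ["01", "01", "02"]})
y = pd.Series([0, 1, 0])

imp = Imputer(columns=["t2m"], strategy="median")
print(imp.fit(X, y).transform(X)["t2m"].tolist())  # [280.0, 282.0, 284.0]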
- """ - - def __init__( - self, - columns: list, - missing_values: Union[int, float, str] = np.nan, - strategy: str = "mean", - fill_value: float = None, - verbose: int = 0, - copy: bool = True, - add_indicator: bool = False, - ) -> None: - super().__init__( - missing_values=missing_values, - strategy=strategy, - fill_value=fill_value, - copy=copy, - add_indicator=add_indicator, - ) - - self.columns = columns - - def fit(self, X: pd.DataFrame, y: Optional[pd.Series] = None) -> "Imputer": - """ - Fit the imputer on X. - - Args: - X: Training dataset features. - y: Training dataset target. - - Returns: - Transformer. - """ - X, y = check_xy(X[self.columns], y) - - super().fit(X, y) - return self - - def transform(self, X: pd.DataFrame) -> pd.DataFrame: - """ - Impute X missing values. - - Args: - X: Training dataset features. - - Returns: - Transformed training dataset. - """ - X = check_x(X) - XS = check_x(X[self.columns]) - - X[self.columns] = super().transform(XS) - - return X - - -class LagTransformer(BaseEstimator, TransformerMixin): - """Add lags features of the selected columns. - - Lags added correspond to day -1, -3 and -7 and are added to each department separately. - - Parameters: - date_column: date column. - zone_columns: geographical zoning column. - columns: columns to add lag. - """ - - def __init__(self, date_column: str, zone_column: str, columns: List[str]) -> None: - self.date_column = date_column - self.zone_column = zone_column - self.columns = columns - - def fit(self, X: pd.DataFrame, y: Optional[pd.Series] = None) -> "LagTransformer": - """ - Fit the imputer on X. - - Args: - X: Training dataset features. - y: Training dataset target. - - Returns: - Transformer. - """ - X, y = check_xy(X, y) - - return self - - def transform(self, X: pd.DataFrame) -> pd.DataFrame: - """ - Create lag features. - - Args: - X: Training dataset features. - - Returns: - Transformed training dataset. - """ - X = check_x(X) - - if X[self.date_column].dtypes != "datetime64[ns]": - raise TypeError( - f"{self.__class__.__name__} transforme methods expect date_column of type datetime64[ns]" - ) - - for var in self.columns: - for dep in X[self.zone_column].unique(): - tmp = X[X[self.zone_column] == dep][[self.date_column, var]].set_index( - self.date_column - ) - tmp1 = tmp.copy() - tmp1 = tmp1.join( - tmp.shift(periods=1, freq="D"), rsuffix="_lag1", how="left" - ) - tmp1 = tmp1.join( - tmp.shift(periods=3, freq="D"), rsuffix="_lag3", how="left" - ) - tmp1 = tmp1.join( - tmp.shift(periods=7, freq="D"), rsuffix="_lag7", how="left" - ) - new_vars = [var + "_lag1", var + "_lag3", var + "_lag7"] - X.loc[X[self.zone_column] == dep, new_vars] = tmp1[new_vars].values - return X - - -class FeatureSelector(BaseEstimator, TransformerMixin): - """Select features correlated to the target. - - Select features with correlation to the target above the threshold. - - Parameters: - exclude: column to exclude from correlation calculation. - method: correlation matrix calculation method. - threshold: columns on which to add lags - """ - - def __init__( - self, exclude: List[str], method: str = "pearson", threshold: float = 0.15 - ) -> None: - self.exclude = exclude - self.method = method - self.threshold = threshold - - def fit(self, X: pd.DataFrame, y: Optional[pd.Series] = None) -> "FeatureSelector": - """Fit the FeatureSelector on X. - - Compute the correlation matrix. - - Args: - X: Training dataset features. - y: Training dataset target. - - Returns: - Transformer. 
- """ - X, y = check_xy(X, y) - self.target_correlation = ( - pd.concat([X, y], axis=1) - .corr(method=self.method) - .loc[y.name] - .apply(abs) - .sort_values(ascending=False) - ) - self.target_correlation = self.target_correlation[ - self.target_correlation.index != y.name - ] - - return self - - def transform(self, X: pd.DataFrame) -> pd.DataFrame: - """ - Select lag features. - - Args: - X: Training dataset features. - - Returns: - Transformed training dataset. - """ - X = check_x(X) - - mask = self.target_correlation > self.threshold - self.selected_features = self.target_correlation[mask].index.tolist() - - return X[self.selected_features] - - -class FeatureSubsetter(BaseEstimator, TransformerMixin): - """Subset dataframe's column. - - Subset any given of the dataframe. - - Parameters: - threshold: columns on which to add lags - """ - - def __init__(self, columns: List[str]) -> None: - self.columns = columns - - def fit(self, X: pd.DataFrame, y: Optional[pd.Series] = None) -> "FeatureSubsetter": - """Comply with pipeline requirements. - - The method does not fit the dataset, the naming convention ensure - the compatibility of the transformer with scikit-learn `Pipeline` - object. - - Args: - X: Training dataset features. - y: Training dataset target. - - Returns: - Transformer. - """ - X, y = check_xy(X, y) - - return self - - def transform(self, X: pd.DataFrame) -> pd.DataFrame: - """ - Select columns. - - Args: - X: Training dataset features. - - Returns: - Training dataset features subset. - """ - X = check_x(X) - - return X[self.columns] diff --git a/pyro_risks/models/utils.py b/pyro_risks/models/utils.py deleted file mode 100644 index 3356b5c..0000000 --- a/pyro_risks/models/utils.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from typing import Tuple -import pandas as pd - -__all__ = ["check_xy", "check_x", "discretizer"] - - -def check_xy(X: pd.DataFrame, y: pd.Series) -> Tuple[pd.DataFrame, pd.Series]: - """ - Validate inputs for transformers. - - Args: - X: Training dataset features pd.DataFrame. - y: Training dataset target pd.Series. - - Raises: - TypeError: Transformer methods expect pd.DataFrame and pd.Series as inputs. - - Returns: - Copy of the inputs. - """ - if isinstance(X, pd.DataFrame) and isinstance(y, pd.Series): - X = X.copy() - y = y.copy() - else: - raise TypeError( - "Transformer methods expect pd.DataFrame\ - and pd.Series as inputs." - ) - return X, y - - -def check_x(X: pd.DataFrame) -> pd.DataFrame: - """ - Validate inputs for tranformers. - - Args: - X: Training dataset features pd.DataFrame. - - Raises: - TypeError: Transformer methods expect pd.DataFrame as inputs. - - Returns: - Copy of the inputs. - """ - if isinstance(X, pd.DataFrame): - X = X.copy() - else: - raise TypeError("Transformer methods expect pd.DataFrame as inputs") - return X - - -def discretizer(x: float) -> int: - """ - Discretize values. 
- - Args: - x (float): value to be discretized - - Returns: - int: discretized value - """ - return 1 if x > 0 else 0 diff --git a/pyro_risks/pipeline/__init__.py b/pyro_risks/pipeline/__init__.py deleted file mode 100644 index f464890..0000000 --- a/pyro_risks/pipeline/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .load import * -from .train import * -from .predict import * -from .evaluate import * diff --git a/pyro_risks/pipeline/evaluate.py b/pyro_risks/pipeline/evaluate.py deleted file mode 100644 index 17434ef..0000000 --- a/pyro_risks/pipeline/evaluate.py +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from typing import Union, Optional -from datetime import datetime -from sklearn.model_selection import train_test_split -from sklearn.metrics import classification_report -from plot_metric.functions import BinaryClassification -from pyro_risks.models import discretizer -from pyro_risks.pipeline.load import load_dataset - -import matplotlib.pyplot as plt -import imblearn.pipeline as pp -import pyro_risks.config as cfg - -import pandas as pd -import numpy as np - -import sys -import os -import json -import joblib - -__all__ = [ - "save_classification_reports", - "save_classification_plots", - "evaluate_pipeline", -] - - -def save_classification_reports( - y_true: np.ndarray, - y_pred: np.ndarray, - prefix: Optional[str] = None, - destination: Optional[str] = None, -) -> None: - """ - Build and save binary classification metrics reports. - - Args: - y_true: Ground truth (correct) labels. - y_pred: Predicted labels, as returned by a calibrated classifier. - prefix: Classification report prefix i.e. pipeline name. Defaults to None. - destination: Folder where the report should be saved. Defaults to ``METADATA_REGISTRY``. - """ - destination = cfg.METADATA_REGISTRY if destination is None else destination - fname = ( - "classification_report" if prefix is None else prefix + "_classification_report" - ) - json_report_path = os.path.join(destination, fname + ".json") - csv_report_path = os.path.join(destination, fname + ".csv") - - report = classification_report(y_true, y_pred, output_dict=True) - - report.pop("accuracy") - report.pop("macro avg") - report.pop("weighted avg") - - # JSON report for tracking metrics - with open(json_report_path, "w") as fp: - json.dump(obj=report, fp=fp) - - # CSV report for plotting classification report - - pd.DataFrame(report).transpose().round(3).to_csv(csv_report_path) - - print(classification_report(y_true, y_pred)) - - -def save_classification_plots( - y_true: np.ndarray, - y_proba: np.ndarray, - threshold: np.float64, - prefix: Optional[str] = None, - destination: Optional[str] = None, -) -> None: - """ - Build and save binary classification performance evaluation plots. - - Args: - y_true: Ground truth (correct) labels. - y_pred: Predicted probabilities of the positive class returned by a classifier. - threshold: Classification pipeline optimal threshold. - prefix: Classification plots prefix i.e. pipeline name. Defaults to None. - destination: Folder where the report should be saved. Defaults to ``METADATA_REGISTRY``. 
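A toy invocation of the deleted report helper; the destination folder is assumed to exist, since the function opens files there directly:

import os
import numpy as np
from pyro_risks.pipeline import save_classification_reports

os.makedirs("./metadata", exist_ok=True)
y_true = np.array([0, 1, 1, 0])
y_pred = np.array([0, 1, 0, 0])

# Writes ./metadata/rf_classification_report.json and .csv, and prints the
# sklearn classification report for the two classes.
save_classification_reports(y_true, y_pred, prefix="rf", destination="./metadata")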
- """ - destination = cfg.METADATA_REGISTRY if destination is None else destination - fname = ( - "classification_plots.png" - if prefix is None - else prefix + "_classification_plots.png" - ) - path = os.path.join(destination, fname) - - bc = BinaryClassification(y_true, y_proba, labels=["No fire", "Fire"]) - - plt.figure(figsize=(15, 10)) - plt.subplot2grid(shape=(2, 6), loc=(0, 0), colspan=2) - bc.plot_roc_curve(threshold=threshold) - plt.subplot2grid((2, 6), (0, 2), colspan=2) - bc.plot_precision_recall_curve(threshold=threshold) - plt.subplot2grid((2, 6), (0, 4), colspan=2) - bc.plot_class_distribution(threshold=threshold) - plt.subplot2grid((2, 6), (1, 1), colspan=2) - bc.plot_confusion_matrix(threshold=threshold) - plt.subplot2grid((2, 6), (1, 3), colspan=2) - bc.plot_confusion_matrix(threshold=threshold, normalize=True) - - plt.savefig(path) - - -def evaluate_pipeline( - X: pd.DataFrame, - y: pd.Series, - pipeline: Union[pp.Pipeline, str], - threshold: str, - prefix: Optional[str] = None, - destination: Optional[str] = None, -) -> None: - """ - Build and save binary classification evaluation reports. - - Args: - X: Training dataset features pd.DataFrame. - y: Training dataset target pd.Series. - pipeline: imbalanced-learn preprocessing pipeline or path to pipeline. - threshold: Classification pipeline optimal threshold path. - prefix: Classification reports prefix i.e. pipeline name. Defaults to None. - destination: Folder where the report should be saved. Defaults to ``METADATA_REGISTRY``. - """ - # setup - _, X_test, _, y_test = train_test_split( - X, y, test_size=cfg.TEST_SIZE, random_state=cfg.RANDOM_STATE - ) - - if not isinstance(pipeline, pp.Pipeline): - pipeline = joblib.load(pipeline) - - y_proba = pipeline.predict_proba(X_test) - - with open(threshold, "r") as file: - optimal_threshold = json.load(file) - - def predict(x): - return 1 if x > optimal_threshold["threshold"] else 0 - - vpredict = np.vectorize(predict) - vdiscretizer = np.vectorize(discretizer) - - y_pred = vpredict(y_proba[:, 1]) - y_test = vdiscretizer(y_test) - - save_classification_reports( - y_true=y_test, y_pred=y_pred, prefix=prefix, destination=destination - ) - - save_classification_plots( - y_true=y_test, - y_proba=y_proba[:, 1], - threshold=optimal_threshold["threshold"], - prefix=prefix, - destination=destination, - ) diff --git a/pyro_risks/pipeline/load.py b/pyro_risks/pipeline/load.py deleted file mode 100644 index d479699..0000000 --- a/pyro_risks/pipeline/load.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from typing import Optional, List -from pyro_risks.datasets.utils import download -from datetime import datetime -from typing import Tuple - -import pyro_risks.config as cfg -import pandas as pd -import os - -__all__ = ["load_dataset"] - - -def load_dataset( - url: Optional[str] = None, - path: Optional[str] = None, - usecols: Optional[List[str]] = None, - pipeline_cols: Optional[List[str]] = None, - destination: str = None, -) -> Tuple[pd.DataFrame, pd.Series]: - """ - Load Pyro Risks training datasets. - - Download and load Pyro Risks training datasets. - - Args: - url: Training dataset URL. Defaults to None. - path: Dataset full path. Defaults to None. - usecols: Subset of the dataset columns. Defaults to None. - pipeline_cols: Subset of the dataset used for training. Defaults to None. - destination: folder where the dataset should be saved. 
Defaults to None. - - Returns: - Tuple[pd.DataFrame, pd.Series] - """ - url = cfg.ERA5T_VIIRS_PIPELINE if url is None else url - path = os.path.join(cfg.DATA_REGISTRY, cfg.DATASET) if path is None else path - usecols = ( - [cfg.DATE_VAR, cfg.ZONE_VAR, cfg.TARGET] + cfg.PIPELINE_ERA5T_VARS - if usecols is None - else usecols - ) - pipeline_cols = ( - [cfg.DATE_VAR, cfg.ZONE_VAR] + cfg.PIPELINE_ERA5T_VARS - if pipeline_cols is None - else pipeline_cols - ) - destination = cfg.DATA_REGISTRY if destination is None else destination - - if not os.path.isfile(path): - download(url=url, default_extension="csv", unzip=False, destination=destination) - - df = pd.read_csv(path, usecols=usecols) - df["day"] = df["day"].apply( - lambda x: datetime.strptime(str(x), "%Y-%m-%d") if not pd.isnull(x) else x - ) - - X = df[pipeline_cols] - y = df[cfg.TARGET] - return X, y diff --git a/pyro_risks/pipeline/predict.py b/pyro_risks/pipeline/predict.py deleted file mode 100644 index 29d210e..0000000 --- a/pyro_risks/pipeline/predict.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -# type: ignore -from pyro_risks import config as cfg -from pyro_risks.datasets.fwi import get_fwi_data_for_predict -from pyro_risks.datasets.ERA5 import get_data_era5t_for_predict -from pyro_risks.datasets.era_fwi_viirs import process_dataset_to_predict -from typing import Optional, List -from io import BytesIO - -import pandas as pd -import dvc.api -import joblib -import logging -import os - - -__all__ = ["PyroRisk"] - - -class PyroRisk(object): - """ - Pyronear Wildfire Risk Forecaster - - Load a trained pipeline from pyrorisks remote model registry, download features from publicly - available data sources (CDS API). Forecast the local (NUTS 3 level) daily wildfire risks - (forest fire danger) in a Given Country (France). - - Args: - model: Can be 'RF' for random forest or 'XGBOOST' for xgboost. Defaults to 'RF'. - - Raises: - ValueError: Model can be only of type RF or XGBOOST - """ - - def __init__(self, model: Optional[str] = "RF") -> None: - self.inputs = None - self.model = model - self.pipeline = None - self.predictions = None - self.country = None - self.zone = None - self.predictions_registry = cfg.PREDICTIONS_REGISTRY - - if self.model == "RF": - self.model_path = cfg.RFMODEL_ERA5T_PATH # file path - elif self.model == "XGBOOST": - self.model_path = cfg.XGBMODEL_ERA5T_PATH # file path - else: - raise ValueError("Model can be only of type RF or XGBOOST") - - def get_pipeline( - self, path: Optional[str] = None, destination: Optional[str] = None - ) -> None: - """Download trained pipeline from remote model registry. - - The `get_pipeline` method downloads the selected trained pipeline from the pyrorisks remote - model registry. The downloaded pipeline is persited in the destination joblib file. - - Args: - path: Location and file name of the pipeline to download, relative to the root of the - dvc project. Defaults to None (self.model_path). - destination: Location where the pipeline is downloaded. Defaults to None (self.model_path). 
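End to end, the deleted loader resolves every argument from pyro_risks.config, downloads the ERA5T/VIIRS csv into the data registry when missing, and returns the feature frame and target. A sketch, assuming network access and the default configuration:

from pyro_risks.pipeline import load_dataset

X, y = load_dataset()
print(X.shape, X.columns.tolist()[:3])
print(y.value_counts(normalize=True).round(3))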
- """ - path = self.model_path if path is None else path - destination = self.model_path if destination is None else destination - - pipeline = joblib.load( - BytesIO( - dvc.api.read( - path=path, repo=cfg.REPO_DIR, remote="artifacts-registry", mode="rb" - ) - ) - ) - joblib.dump(pipeline, destination) - - @staticmethod - def get_inputs( - day: str, - country: Optional[str] = "France", - dir_destination: Optional[str] = None, - ) -> None: - """Download datasets and build features for forecasting daily wildfire risks on a given date. - - The `get_inputs` method downloads datsets from publicly available data sources (CDS API) and - build features for forecasting wildfire risks on a given date. The downloaded inputs are - persited in the destination csv file. - - Args: - day: Date of interest ('%Y-%m-%d') for example '2020-05-05'. - country: Country of interest. Defaults to 'France'. - destination: Location where the daily inputs are persisted. - Defaults to None (cfg.PIPELINE_INPUT_PATH). - """ - # TODO: - # Delete get_fwi_data_for_predict variables not available at predict time - # Create process_era5 function - # Create MergedEraViir class - dir_destination = ( - cfg.PREDICTIONS_REGISTRY if dir_destination is None else dir_destination - ) - fname = f"inputs_{country}_{day}.csv" - destination = os.path.join(dir_destination, fname) - fwi = get_fwi_data_for_predict(day) - era = get_data_era5t_for_predict(day) - res_test = process_dataset_to_predict(fwi, era) - res_test = res_test.rename({"nom": "departement"}, axis=1) - res_test.to_csv(destination) - - def load_pipeline(self, path: Optional[str] = None) -> None: - """Load trained pipeline from local path. - - Args: - path: Location where the pipeline has been downloaded. Defaults to None (self.model_path). - """ - path = self.model_path if path is None else path - - if os.path.isfile(path): - self.pipeline = joblib.load(path) - else: - self.get_pipeline(destination=path) - self.pipeline = joblib.load(path) - - def load_inputs( - self, - day: str, - country: Optional[str] = "France", - usecols: Optional[List[str]] = None, - dir_path: Optional[str] = None, - ) -> None: - """Load inputs from local path. - - Args: - day: Date of interest ('%Y-%m-%d') for example '2020-05-05'. - country: Country of interest. Defaults to 'France'. - dir_path: Location where the daily inputs have been persisted. Defaults to None - (cfg.PREDICTIONS_REGISTRY). - """ - dir_path = cfg.PREDICTIONS_REGISTRY if dir_path is None else dir_path - usecols = cfg.PIPELINE_COLS if usecols is None else usecols - fname = f"inputs_{country}_{day}.csv" - - path = os.path.join(dir_path, fname) - - if os.path.isfile(path): - self.inputs = pd.read_csv(path, usecols=usecols) - else: - self.get_inputs(day=day, country=country, dir_destination=dir_path) - self.inputs = pd.read_csv(path, usecols=usecols) - self.inputs[cfg.DATE_VAR] = pd.to_datetime(self.inputs[cfg.DATE_VAR]) - - def predict( - self, - day: str, - country: Optional[str] = "France", - zone_column: Optional[str] = cfg.ZONE_VAR, - dir_destination: Optional[str] = None, - ) -> None: - """Predict local daily wildfire risks in a given country. - - Forecast the local (NUTS 3 level) daily wildfire risks (forest fire danger) in a given - country (France). Note that predictions on fwi and era5land data queried from CDS API - will return 93 departments instead of 96 for France. - - Args: - day: Date of interest ('%Y-%m-%d') for example '2020-05-05'. - country: Country of interest. Defaults to 'France'. 
- dir_destination: Location where the daily inputs are persisted. - Defaults to None (cfg.PREDICTIONS_REGISTRY). - """ - path = ( - os.path.join(dir_destination, f"{self.model}.joblib") - if dir_destination is not None - else os.path.join(cfg.PREDICTIONS_REGISTRY, f"{self.model}.joblib") - ) - self.load_pipeline(path=path) - self.load_inputs(day=day, country=country, dir_path=dir_destination) - - fname = f"{self.model}_predictions_{country}_{day}.joblib" - destination = os.path.join(dir_destination, fname) - - if self.model == "RF": - self.predictions = self.pipeline.predict_proba(self.inputs) - res = dict(zip(self.inputs[zone_column], self.predictions[:, 1].round(3))) - elif self.model == "XGBOOST": - self.predictions = self.pipeline.predict_proba(self.inputs) - res = dict(zip(self.inputs[zone_column], self.predictions.round(3))) - logging.info( - f"Predict {country} local wildfire risks on {day}, using {self.model} pipeline." - ) - joblib.dump(res, destination) - - def get_predictions( - self, - day: str, - country: Optional[str] = "France", - dir_path: Optional[str] = None, - dir_destination: Optional[str] = None, - ) -> None: - """Download predictions for the day of interest from the remote prediction registry. - - The `get_predictions` method downloads the forecasted local wildfire risks at a given - day in a given country (France). The downloaded predictions are persited in the - destination joblib file. - - Args: - day: Date of interest ('%Y-%m-%d') for example '2020-05-05'. - country: Country of interest. Defaults to 'France'. - dir_path: Location of the predictions to download, relative to the root of the dvc project. - Defaults to None. - dir_destination: Location where the daily predictions are persisted. Defaults to None. - """ - dir_path = cfg.PREDICTIONS_REGISTRY if dir_path is None else dir_path - dir_destination = ( - cfg.PREDICTIONS_REGISTRY if dir_destination is None else dir_destination - ) - fname = f"{self.model}_predictions_{country}_{day}.joblib" - destination = os.path.join(dir_destination, fname) - path = os.path.join(dir_path, fname) - - predictions = joblib.load( - BytesIO( - dvc.api.read( - path=path, repo=cfg.REPO_DIR, remote="artifacts-registry", mode="rb" - ) - ) - ) - joblib.dump(predictions, destination) - - def expose_predictions( - self, - day: str, - country: Optional[str] = "France", - dir_path: Optional[str] = None, - dir_destination: Optional[str] = None, - ) -> dict: - """Serves a prediction for the specified day. - - Args: - day: Date of interest ('%Y-%m-%d') for example '2020-05-05'. - country: Country of interest. Defaults to 'France'. - dir_path: Location of the predictions to download, relative to the root of the dvc project. - Defaults to None. - - Returns: - dict[dict]: keys are departements, values dictionaries whose keys are score and explainability - and values probability predictions for label 1 (fire) and feature contributions to predictions - respectively. 
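A forecasting sketch with the deleted PyroRisk wrapper, assuming CDS API credentials and access to the dvc artifacts-registry remote. Note that predict() joins dir_destination into its output path without the None fallback it applies to the pipeline path, so passing the directory explicitly avoids a TypeError:

from pyro_risks.pipeline import PyroRisk

pyrorisk = PyroRisk(model="RF")
pyrorisk.predict(day="2020-05-05", dir_destination="./predictions/")

scores = pyrorisk.expose_predictions(day="2020-05-05", dir_destination="./predictions/")
# Five highest-risk departments for the day.
print(sorted(scores.items(), key=lambda kv: kv[1]["score"], reverse=True)[:5])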
- """ - fname = f"{self.model}_predictions_{country}_{day}.joblib" - path = os.path.join(dir_destination, fname) - - if os.path.isfile(path): - self.predictions = joblib.load(path) - else: - self.get_predictions( - day=day, - country=country, - dir_path=dir_path, - dir_destination=dir_destination, - ) - self.predictions = joblib.load(path) - return { - x: {"score": self.predictions[x], "explainability": None} - for x in self.predictions - } diff --git a/pyro_risks/pipeline/train.py b/pyro_risks/pipeline/train.py deleted file mode 100644 index b960546..0000000 --- a/pyro_risks/pipeline/train.py +++ /dev/null @@ -1,159 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from typing import Union, Optional -from sklearn.model_selection import train_test_split -from sklearn.metrics import precision_recall_curve -from sklearn.utils import estimator_html_repr -from pyro_risks.models import xgb_pipeline, rf_pipeline, discretizer -from pyro_risks.pipeline.load import load_dataset -from datetime import datetime -import imblearn.pipeline as pp -import pyro_risks.config as cfg - -import sys -import pandas as pd -import numpy as np - -import os -import time -import json -import joblib - -__all__ = ["calibrate_pipeline", "save_pipeline", "train_pipeline"] - - -def calibrate_pipeline( - y_test: Union[pd.Series, np.ndarray], - y_scores: Union[pd.Series, np.ndarray], - ignore_prints: Optional[bool] = False, -) -> np.float64: - """ - Calibrate Classification Pipeline. - - Args: - y_test: Binary test target. - y_scores: Predicted probabilities from the test set. - ignore_prints: Whether to print results. Defaults to False. - - Returns: - Threshold maximizing the f1-score. - """ - precision, recall, thresholds = precision_recall_curve(y_test, y_scores[:, 1]) - fscore = (2 * precision * recall) / (precision + recall) - ix = np.argmax(fscore) - - if not ignore_prints: - print(f"Best Threshold={thresholds[ix]}, F-Score={fscore[ix]}") - - return thresholds[ix] - - -def save_pipeline( - pipeline: pp.Pipeline, - model: str, - optimal_threshold: np.float64, - destination: Optional[str] = None, - ignore_html: Optional[bool] = False, -) -> None: - """ - Serialize pipeline. - - Args: - pipeline: imbalanced-learn preprocessing pipeline. - model: model name. - optimal_threshold: model calibration optimal threshold. - destination: folder where the pipeline should be saved. Defaults to 'cfg.MODEL_REGISTRY'. - ignore_html: Persist pipeline html description. Defaults to False. - """ - threshold = {"threshold": float(optimal_threshold)} - registry = cfg.MODEL_REGISTRY if destination is None else destination - pipeline_fname = f"{model}.joblib" - threshold_fname = f"{model}_threshold.json" - html_fname = f"{model}_pipeline.html" - - if not os.path.exists(registry): - os.makedirs(registry) - - joblib.dump(pipeline, os.path.join(registry, pipeline_fname)) - - with open(registry + "/" + threshold_fname, "w") as file: - json.dump(threshold, file) - - if not ignore_html: - with open(registry + "/" + html_fname, "w") as file: - file.write(estimator_html_repr(pipeline)) - - -def train_pipeline( - X: pd.DataFrame, - y: pd.Series, - model: str, - pipeline: Optional[pp.Pipeline] = None, - destination: Optional[str] = None, - ignore_prints: Optional[bool] = False, - ignore_html: Optional[bool] = False, -) -> None: - """ - Train a classification pipeline. - - Args: - X: Training dataset features pd.DataFrame. 
- y: Training dataset target pd.Series. - model: model name. - pipeline: imbalanced-learn preprocessing pipeline. Defaults to None. - destination: folder where the pipeline should be saved. Defaults to 'cfg.MODEL_REGISTRY'. - ignore_prints: Whether to print results. Defaults to False. - ignore_html: Persist pipeline html description. Defaults to False. - """ - X_train, X_test, y_train, y_test = train_test_split( - X, y, test_size=cfg.TEST_SIZE, random_state=cfg.RANDOM_STATE - ) - - vdiscretizer = np.vectorize(discretizer) - - if model == "RF": - rf_pipeline.fit(X_train, y_train) - y_scores = rf_pipeline.predict_proba(X_test) - optimal_threshold = calibrate_pipeline( - y_test=vdiscretizer(y_test), y_scores=y_scores, ignore_prints=ignore_prints - ) - save_pipeline( - pipeline=rf_pipeline, - model=model, - optimal_threshold=optimal_threshold, - destination=destination, - ignore_html=ignore_html, - ) - - elif model == "XGBOOST": - xgb_pipeline.fit( - X_train, y_train, xgboost__eval_metric=cfg.XGB_FIT_PARAMS["eval_metric"] - ) - y_scores = xgb_pipeline.predict_proba(X_test) - optimal_threshold = calibrate_pipeline( - y_test=vdiscretizer(y_test), y_scores=y_scores, ignore_prints=ignore_prints - ) - save_pipeline( - pipeline=xgb_pipeline, - model=model, - optimal_threshold=optimal_threshold, - destination=destination, - ignore_html=ignore_html, - ) - - elif model not in ["RF", "XGBOOST"] and pipeline is not None: - pipeline.fit(X_train, y_train) - y_scores = pipeline.predict_proba(X_test) - optimal_threshold = calibrate_pipeline( - y_test=vdiscretizer(y_test), y_scores=y_scores, ignore_prints=ignore_prints - ) - save_pipeline( - pipeline=pipeline, - model=model, - optimal_threshold=optimal_threshold, - destination=destination, - ignore_html=ignore_html, - ) diff --git a/pyro_risks/version.py b/pyro_risks/version.py deleted file mode 100644 index 8f02035..0000000 --- a/pyro_risks/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0a0" diff --git a/pyrorisks/__init__.py b/pyrorisks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pyrorisks/platform_fwi/__init__.py b/pyrorisks/platform_fwi/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pyro_risks/platform_fwi/get_fwi_effis_score.py b/pyrorisks/platform_fwi/get_fwi_effis_score.py similarity index 90% rename from pyro_risks/platform_fwi/get_fwi_effis_score.py rename to pyrorisks/platform_fwi/get_fwi_effis_score.py index 0ac52c4..3f7e289 100644 --- a/pyro_risks/platform_fwi/get_fwi_effis_score.py +++ b/pyrorisks/platform_fwi/get_fwi_effis_score.py @@ -6,6 +6,8 @@ from pyro_risks.utils.s3 import S3Bucket __all__ = ["get_score"] + + def point_fwi_category(row, point_coords): if row["geometry"].contains(point_coords): return row["fwi_category"] @@ -35,8 +37,6 @@ def get_score(lat, lon): gdf = gpd.GeoDataFrame.from_features(json_content["features"]) - gdf["fwi_category_for_point"] = gdf.apply( - lambda row: point_fwi_category(row, point_coords), axis=1 - ) + gdf["fwi_category_for_point"] = gdf.apply(lambda row: point_fwi_category(row, point_coords), axis=1) point_fwi_score = gdf.dropna().iloc[0]["fwi_category"] return point_fwi_score diff --git a/pyro_risks/platform_fwi/main.py b/pyrorisks/platform_fwi/main.py similarity index 93% rename from pyro_risks/platform_fwi/main.py rename to pyrorisks/platform_fwi/main.py index e311bc4..c587dc8 100644 --- a/pyro_risks/platform_fwi/main.py +++ b/pyrorisks/platform_fwi/main.py @@ -32,9 +32,7 @@ def main(retrieved_date): fwi = FWIHelpers() gdf_fwi = 
fwi.get_fwi(effis_tiff_file_url) gdf_fwi = fwi.fwi_sea_remover(gdf_fwi) - gdf_fwi["fwi_category"] = gdf_fwi.apply( - lambda row: fwi.fwi_category(row["fwi_pixel_value"]), axis=1 - ) + gdf_fwi["fwi_category"] = gdf_fwi.apply(lambda row: fwi.fwi_category(row["fwi_pixel_value"]), axis=1) gdf_fwi = gdf_fwi.drop("fwi_pixel_value", axis=1) new_json_fwi = fwi.fwi_geojson_maker(gdf_fwi) diff --git a/pyrorisks/utils/__init__.py b/pyrorisks/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pyro_risks/utils/fwi_helpers.py b/pyrorisks/utils/fwi_helpers.py similarity index 90% rename from pyro_risks/utils/fwi_helpers.py rename to pyrorisks/utils/fwi_helpers.py index 2443b73..083428f 100644 --- a/pyro_risks/utils/fwi_helpers.py +++ b/pyrorisks/utils/fwi_helpers.py @@ -1,10 +1,9 @@ import rasterio -from rasterio.features import shapes import geopandas as gpd import requests from io import BytesIO import json -from typing import Optional +from typing import Optional, Dict, Any class FWIHelpers: @@ -41,15 +40,11 @@ def get_fwi(self, tiff_url: str) -> Optional[gpd.GeoDataFrame]: image = src.read(1) # first band results = ( {"properties": {"fwi_pixel_value": v}, "geometry": s} - for i, (s, v) in enumerate( - shapes(image, mask=mask, transform=data["transform"]) - ) + for i, (s, v) in enumerate(rasterio.features.shapes(image, mask=mask, transform=data["transform"])) ) geoms = list(results) - gpd_polygonized_raster = gpd.GeoDataFrame.from_features( - geoms, crs=str(data["crs"]) - ) + gpd_polygonized_raster = gpd.GeoDataFrame.from_features(geoms, crs=str(data["crs"])) return gpd_polygonized_raster except Exception as e: @@ -66,9 +61,7 @@ def fwi_sea_remover(self, geodataframe: gpd.GeoDataFrame) -> gpd.GeoDataFrame: Returns: geodataframe (geopandas.GeoDataFrame): The GeoDataFrame without the sea. """ - geodataframe = geodataframe.loc[ - (geodataframe["fwi_pixel_value"] != 0) - ] # remove the sea + geodataframe = geodataframe.loc[(geodataframe["fwi_pixel_value"] != 0)] # remove the sea return geodataframe def fwi_category(self, fwi_pixel_val: int) -> int: @@ -102,7 +95,7 @@ def fwi_category(self, fwi_pixel_val: int) -> int: return 3 - def fwi_geojson_maker(self, geodataframe: gpd.GeoDataFrame) -> json: + def fwi_geojson_maker(self, geodataframe: gpd.GeoDataFrame) -> Dict[str, Any]: """ Converts a GeoDataFrame into a GeoJSON. diff --git a/pyro_risks/utils/s3.py b/pyrorisks/utils/s3.py similarity index 88% rename from pyro_risks/utils/s3.py rename to pyrorisks/utils/s3.py index a54cf27..1e9592b 100644 --- a/pyro_risks/utils/s3.py +++ b/pyrorisks/utils/s3.py @@ -1,5 +1,6 @@ import boto3 import json +from typing import Dict, Any, Optional import os @@ -13,7 +14,7 @@ class S3Bucket: Example: To create an instance of the S3Bucket class with a session, use: - >>> from pyro_risks.utils.s3 import S3Bucket + >>> from pyrorisks.utils.s3 import S3Bucket >>> s3 = S3Bucket( bucket_name='mybucket', @@ -97,7 +98,7 @@ def upload_file(self, file_path: str, object_key: str) -> None: """ self.bucket.upload_file(file_path, object_key) - def write_json_to_s3(self, json_data: json, object_key: str) -> None: + def write_json_to_s3(self, json_data: Dict[str, Any], object_key: str) -> None: """ Writes a JSON file on the S3 bucket. @@ -105,9 +106,7 @@ def write_json_to_s3(self, json_data: json, object_key: str) -> None: json_data (json): The JSON data we want to upload. object_key (str): The S3 key (path) where the file will be stored. 
""" - self.bucket.put_object( - Key=object_key, Body=bytes(json.dumps(json_data).encode("UTF-8")) - ) + self.bucket.put_object(Key=object_key, Body=bytes(json.dumps(json_data).encode("UTF-8"))) def read_json_from_s3(self, object_key: str) -> None: """ @@ -116,9 +115,7 @@ def read_json_from_s3(self, object_key: str) -> None: Args: object_key (str): The S3 key (path) where the file is stored. """ - file_content = ( - self.bucket.Object(object_key).get()["Body"].read().decode("utf-8") - ) + file_content = self.bucket.Object(object_key).get()["Body"].read().decode("utf-8") json_content = json.loads(file_content) return json_content @@ -180,7 +177,7 @@ def list_folders(self, prefix: str = "", delimiter: str = "") -> list[str]: def list_files( self, - patterns: list[str] = None, + patterns: Optional[list[str]] = None, prefix: str = "", delimiter: str = "", limit: int = 0, @@ -202,9 +199,7 @@ def list_files( if limit != 0: object_filter = object_filter.limit(limit) for obj in object_filter: - if not patterns or ( - type(patterns) == list and any([p in obj.key for p in patterns]) - ): + if not patterns or (isinstance(patterns, list) and any([p in obj.key for p in patterns])): files.append(obj.key) return files @@ -223,7 +218,7 @@ def get_file_metadata(self, object_key: str) -> dict: return metadata def get_files_metadata( - self, patterns: list[str] = None, prefix: str = "", delimiter: str = "" + self, patterns: Optional[list[str]] = None, prefix: str = "", delimiter: str = "" ) -> list[dict]: """ Lists files in the S3 bucket with their size in bytes and last modified dates. @@ -238,16 +233,12 @@ def get_files_metadata( """ files = [] for obj in self.bucket.objects.filter(Prefix=prefix, Delimiter=delimiter): - if not patterns or ( - type(patterns) == list and any([p in obj.key for p in patterns]) - ): - files.append( - { - "file_name": obj.key, - "file_size": round(obj.size * 1.0 / (1024), 2), - "file_last_modified": obj.last_modified, - } - ) + if not patterns or (isinstance(patterns, list) and any([p in obj.key for p in patterns])): + files.append({ + "file_name": obj.key, + "file_size": round(obj.size * 1.0 / (1024), 2), + "file_last_modified": obj.last_modified, + }) return files @@ -277,7 +268,5 @@ def read_credentials( for line in lines: if "region" in line: credentials["region_name"] = line.split("=")[1].strip() - credentials["endpoint_url"] = ( - "https://s3." + credentials["region_name"] + ".io.cloud.ovh.net/" - ) + credentials["endpoint_url"] = "https://s3." + credentials["region_name"] + ".io.cloud.ovh.net/" return credentials diff --git a/requirements-app.txt b/requirements-app.txt deleted file mode 100644 index 281e37b..0000000 --- a/requirements-app.txt +++ /dev/null @@ -1,3 +0,0 @@ -fastapi==0.61.1 -uvicorn>=0.11.1 -pyro_risks diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index ba82c03..0000000 --- a/requirements.txt +++ /dev/null @@ -1,24 +0,0 @@ -pandas>=1.1.4 -geopandas>=0.8.1 -Rtree>=0.9.4 -Shapely>=1.7.1 -netCDF4>=1.5.4 -requests>=2.24.0 -xlrd==1.2.0 -boto3==1.26.106 - -cdsapi - -numpy==1.26.4 -xarray>=0.16.1 -scipy>=1.5.4 - -scikit-learn>=0.23.2 -imbalanced-learn>=0.7.0 -xgboost==1.2.1 -cdsapi==0.4.0 -python-dotenv>=0.15.0 - -plot-metric==0.0.6 -dvc>=2.0.5 -dvc[gdrive]>=2.0.5 diff --git a/scripts/example_ERA5_FIRMS.py b/scripts/example_ERA5_FIRMS.py deleted file mode 100644 index 8116725..0000000 --- a/scripts/example_ERA5_FIRMS.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. 
- -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from pyro_risks.datasets import NASAFIRMS, ERA5Land -from pyro_risks.datasets.datasets_mergers import ( - merge_datasets_by_departements, - merge_datasets_by_closest_weather_point, -) -from pyro_risks.datasets.utils import get_intersection_range - - -def main(args): - weather = ERA5Land(args.ERA5) - nasa_firms = NASAFIRMS(args.nasa_firms, args.nasa_firms_type) - print(weather.shape) - print(nasa_firms.shape) - - # Time span selection - date_range = get_intersection_range(weather.time, nasa_firms.acq_date) - weather = weather[weather.time.isin(date_range)] - nasa_firms = nasa_firms[nasa_firms.acq_date.isin(date_range)] - - print(weather.shape) - print(nasa_firms.shape) - - # Merge - if args.type_of_merged == "departements": - # drop redundant columns with weather datasets - nasa_firms = nasa_firms.drop(["nom"], axis=1) - merged_data = merge_datasets_by_departements( - weather, "time", "code", nasa_firms, "acq_date", "code", "left" - ) - to_drop = [ - "acq_date", - "latitude_y", - "longitude_y", - "bright_t31", - "frp", - "acq_date_time", - "confidence", - ] - - else: - # drop redundant columns with weather datasets - nasa_firms = nasa_firms.drop(["code", "nom"], axis=1) - merged_data = merge_datasets_by_closest_weather_point( - weather, "time", nasa_firms, "acq_date" - ) - to_drop = [ - "closest_weather_point", - "acq_date", - "latitude_y", - "longitude_y", - "bright_t31", - "frp", - "acq_date_time", - "confidence", - "weather_lat", - "weather_lon", - ] - - final_data = merged_data.copy() - where = merged_data["confidence"] >= 60 - final_data.loc[where, "Statut"] = 1 - final_data.loc[~where, "Statut"] = 0 - final_data["Statut"] = final_data["Statut"].astype(int) - - # drop unnecessary columns - final_data = final_data.drop(to_drop, axis=1) - - print(final_data) - - -def parse_args(): - import argparse - - parser = argparse.ArgumentParser( - description="Pyronear wildfire history example based on NASA FIRMS and ERA5 Land", - formatter_class=argparse.ArgumentDefaultsHelpFormatter, - ) - - parser.add_argument( - "--ERA5", default=None, type=str, help="path or URL of ERA5 Land source" - ) - - parser.add_argument( - "--nasa_firms", - default=None, - type=str, - help="path or URL of NASA FIRMS data source", - ) - - parser.add_argument( - "--nasa_firms_type", - default="json", - type=str, - help="type of NASA FIRMS data source", - ) - - parser.add_argument( - "--type_of_merged", - default="proximity", - type=str, - help="type of merged between weather and fire datasets: either departements or proximity", - ) - - args = parser.parse_args() - - return args - - -if __name__ == "__main__": - args = parse_args() - main(args) diff --git a/scripts/example_ERA5_VIIRS.py b/scripts/example_ERA5_VIIRS.py deleted file mode 100644 index 84adf9f..0000000 --- a/scripts/example_ERA5_VIIRS.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. 
- -from pyro_risks.datasets import NASAFIRMS_VIIRS, ERA5Land -from pyro_risks.datasets.datasets_mergers import ( - merge_datasets_by_departements, - merge_by_proximity, -) -from pyro_risks.datasets.utils import get_intersection_range - - -def main(args): - weather = ERA5Land(args.ERA5) - nasa_firms = NASAFIRMS_VIIRS(args.nasa_firms, args.nasa_firms_type) - print(weather.shape) - print(nasa_firms.shape) - - # Time span selection - date_range = get_intersection_range(weather.time, nasa_firms.acq_date) - weather = weather[weather.time.isin(date_range)] - nasa_firms = nasa_firms[nasa_firms.acq_date.isin(date_range)] - - print(weather.shape) - print(nasa_firms.shape) - - # Keep only vegetation wildfires and remove thermal anomalies with low confidence - where = (nasa_firms["confidence"] != "l") & (nasa_firms["type"] == 0) - nasa_firms = nasa_firms[where] - - # Merge - if args.type_of_merged == "departements": - # drop redundant columns with weather datasets - nasa_firms = nasa_firms.drop(["nom"], axis=1) - merged_data = merge_datasets_by_departements( - weather, "time", "code", nasa_firms, "acq_date", "code", "left" - ) - to_drop = [ - "acq_date", - "latitude_y", - "longitude_y", - "bright_ti4", - "confidence", - "bright_ti5", - "frp", - "type", - "acq_date_time", - ] - - else: - # drop redundant columns with weather datasets - nasa_firms = nasa_firms.drop(["code", "nom"], axis=1) - merged_data = merge_by_proximity( - nasa_firms, "acq_date", weather, "time", "right" - ) - to_drop = [ - "latitude_x", - "longitude_x", - "closest_lat", - "closest_lon", - "acq_date", - "bright_ti4", - "confidence", - "bright_ti5", - "frp", - "type", - "acq_date_time", - ] - - final_data = merged_data.copy() - where = merged_data["confidence"].isna() - final_data.loc[~where, "Statut"] = 1 - final_data.loc[where, "Statut"] = 0 - final_data["Statut"] = final_data["Statut"].astype(int) - - # drop unnecessary columns - final_data = final_data.drop(to_drop, axis=1) - final_data = final_data.rename( - columns={"latitude_y": "latitude", "longitude_y": "longitude"} - ) - - print(final_data) - - -def parse_args(): - import argparse - - parser = argparse.ArgumentParser( - description="Pyronear wildfire history example based on NASA FIRMS and ERA5 Land", - formatter_class=argparse.ArgumentDefaultsHelpFormatter, - ) - - parser.add_argument( - "--ERA5", default=None, type=str, help="path or URL of ERA5 Land source" - ) - - parser.add_argument( - "--nasa_firms", - default=None, - type=str, - help="path or URL of NASA FIRMS data source", - ) - - parser.add_argument( - "--nasa_firms_type", - default="csv", - type=str, - help="type of NASA FIRMS data source", - ) - - parser.add_argument( - "--type_of_merged", - default="proximity", - type=str, - help="type of merged between weather and fire datasets: either departements or proximity", - ) - - args = parser.parse_args() - - return args - - -if __name__ == "__main__": - args = parse_args() - main(args) diff --git a/scripts/example_NASA_FIRMS.py b/scripts/example_NASA_FIRMS.py deleted file mode 100644 index a765349..0000000 --- a/scripts/example_NASA_FIRMS.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. 
- -from pyro_risks.datasets import NASAFIRMS, NOAAWeather -from pyro_risks.datasets.datasets_mergers import ( - merge_datasets_by_closest_weather_station, - merge_datasets_by_departements, -) -from pyro_risks.datasets.utils import get_intersection_range - - -def main(args): - weather = NOAAWeather(args.weather) - nasa_firms = NASAFIRMS(args.nasa_firms, args.nasa_firms_type) - print(weather.shape) - print(nasa_firms.shape) - - # Time span selection - date_range = get_intersection_range(weather.DATE, nasa_firms.acq_date) - weather = weather[weather.DATE.isin(date_range)] - nasa_firms = nasa_firms[nasa_firms.acq_date.isin(date_range)] - - print(weather.shape) - print(nasa_firms.shape) - - # Merge - if args.type_of_merged == "departements": - # drop redundant columns with weather datasets - nasa_firms = nasa_firms.drop(["nom"], axis=1) - merged_data = merge_datasets_by_departements( - weather, "DATE", "code", nasa_firms, "acq_date", "code", "left" - ) - to_drop = [ - # 'closest_weather_station', - "acq_date", - "latitude", - "longitude", - "bright_t31", - "frp", - "acq_date_time", - "confidence", - ] - - else: - # drop redundant columns with weather datasets - nasa_firms = nasa_firms.drop(["code", "nom"], axis=1) - merged_data = merge_datasets_by_closest_weather_station( - weather, "DATE", nasa_firms, "acq_date" - ) - to_drop = [ - "closest_weather_station", - "acq_date", - "latitude", - "longitude", - "bright_t31", - "frp", - "acq_date_time", - "confidence", - ] - - final_data = merged_data.copy() - where = merged_data["confidence"] >= 60 - final_data.loc[where, "Statut"] = 1 - final_data.loc[~where, "Statut"] = 0 - final_data["Statut"] = final_data["Statut"].astype(int) - - # drop unnecessary columns - final_data = final_data.drop(to_drop, axis=1) - - print(final_data) - - -def parse_args(): - import argparse - - parser = argparse.ArgumentParser( - description="Pyronear wildfire history example based on NASA FIRMS", - formatter_class=argparse.ArgumentDefaultsHelpFormatter, - ) - - parser.add_argument( - "--weather", default=None, type=str, help="path or URL of NOAA weather source" - ) - - parser.add_argument( - "--nasa_firms", - default=None, - type=str, - help="path or URL of NASA FIRMS data source", - ) - - parser.add_argument( - "--nasa_firms_type", - default="json", - type=str, - help="type of NASA FIRMS data source", - ) - - parser.add_argument( - "--type_of_merged", - default="departements", - type=str, - help="type of merged between weather and fire datasets: either departements or proximity", - ) - - args = parser.parse_args() - - return args - - -if __name__ == "__main__": - args = parse_args() - main(args) diff --git a/scripts/example_scorev0.py b/scripts/example_scorev0.py deleted file mode 100644 index ab64dff..0000000 --- a/scripts/example_scorev0.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. 
- -from pyro_risks.datasets import MergedEraFwiViirs -from pyro_risks.models.score_v0 import ( - add_lags, - prepare_dataset, - train_random_forest, - split_train_test, - xgb_model, -) - -SELECTED_DEP = [ - "Pyrénées-Atlantiques", - "Hautes-Pyrénées", - "Ariège", - "Haute-Corse", - "Lozère", - "Gard", - "Hérault", - "Bouches-du-Rhônes", - "Pyrénées-Orientales", - "Cantal", - "Alpes-Maritimes", - "Aveyron", -] - - -def run(): - df = MergedEraFwiViirs() - df_lags = add_lags(df, df.drop(["day", "departement", "fires"], axis=1).columns) - X, y = prepare_dataset(df_lags, selected_dep=SELECTED_DEP) - X_train, X_test, y_train, y_test = split_train_test(X, y) - train_random_forest(X_train, X_test, y_train, y_test, ignore_prints=False) - xgb_model(X_train, y_train, X_test, y_test, ignore_prints=False) - - -if __name__ == "__main__": - run() diff --git a/scripts/example_weather_wildfire.py b/scripts/example_weather_wildfire.py deleted file mode 100644 index 1142319..0000000 --- a/scripts/example_weather_wildfire.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from pyro_risks.datasets import NOAAWeather, BDIFFHistory -from pyro_risks.datasets.datasets_mergers import merge_datasets_by_departements -from pyro_risks.datasets.utils import get_intersection_range - - -def main(args): - weather = NOAAWeather(args.weather) - history = BDIFFHistory(args.wildfire) - - # Time span selection - date_range = get_intersection_range(weather.DATE, history.date) - weather = weather[weather.DATE.isin(date_range)] - history = history[history.date.isin(date_range)] - - # Merge - df = merge_datasets_by_departements( - weather, "DATE", "code", history, "date", "Département", "left" - ) - - # Label data - df.Statut = 1 - df.Statut.isna().astype(int) - - df = df.filter( - items=[ - "DATE", - "code", - "nom", - "LATITUDE", - "LONGITUDE", - "ELEVATION", - "DEWP", - "DEWP_ATTRIBUTES", - "FRSHTT", - "GUST", - "MAX", - "MIN", - "MXSPD", - "PRCP", - "SLP", - "SLP_ATTRIBUTES", - "SNDP", - "STP", - "STP_ATTRIBUTES", - "TEMP", - "TEMP_ATTRIBUTES", - "VISIB", - "VISIB_ATTRIBUTES", - "WDSP", - "WDSP_ATTRIBUTES", - "Statut", - ] - ) - - print(df) - - -def parse_args(): - import argparse - - parser = argparse.ArgumentParser( - description="Pyronear weather & wildfire history example", - formatter_class=argparse.ArgumentDefaultsHelpFormatter, - ) - - parser.add_argument( - "--weather", default=None, type=str, help="path or URL of NOAA weather source" - ) - parser.add_argument( - "--wildfire", default=None, type=str, help="path or URL of BDIFF history source" - ) - - args = parser.parse_args() - - return args - - -if __name__ == "__main__": - args = parse_args() - main(args) diff --git a/setup.py b/setup.py deleted file mode 100644 index 3d61bc4..0000000 --- a/setup.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. 
- -#!usr/bin/python - -""" -Package installation setup -""" - -import os -import subprocess -from setuptools import setup, find_packages - -PACKAGE_NAME = "pyro_risks" -VERSION = "0.0.1" - - -with open("README.md") as f: - readme = f.read() - -requirements = [ - "boto3==1.28.45", - "botocore==1.31.45", - "click==8.1.7", - "geopandas==0.13.2", - "pandas==2.1.0", - "python-dotenv==1.0.0", - "rasterio==1.3.9", - "requests==2.31.0", - "numpy==1.26.4", -] - -setup( - name=PACKAGE_NAME, - version=VERSION, - author="Pyronear Contributors", - description="Pre-processing pipelines and models for wildfire forecasting and monitoring", - long_description=readme, - long_description_content_type="text/markdown", - url="https://github.com/pyronear/pyro-risks", - download_url="https://github.com/pyronear/pyro-risks/tags", - license="GPLv3", - entry_points={"console_scripts": ["pyrorisks = pyro_risks.main:main"]}, - classifiers=[ - "Development Status :: 2 - Pre-Alpha", - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", - "Natural Language :: English", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Topic :: Scientific/Engineering", - "Topic :: Scientific/Engineering :: Mathematics", - "Topic :: Scientific/Engineering :: Artificial Intelligence", - "Topic :: Software Development", - "Topic :: Software Development :: Libraries", - "Topic :: Software Development :: Libraries :: Python Modules", - ], - keywords=["data science", "time series", "machine learning"], - packages=find_packages(exclude=("test",)), - zip_safe=True, - python_requires=">=3.6.0", - include_package_data=True, - install_requires=requirements, - package_data={"": ["LICENSE"]}, -) diff --git a/test/test_datasets.py b/test/test_datasets.py deleted file mode 100644 index 29b4379..0000000 --- a/test/test_datasets.py +++ /dev/null @@ -1,500 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. 
- -import unittest - -import numpy as np -import pandas as pd -import tempfile -import requests -import tarfile -import gzip -import csv -import os - -from pandas.testing import assert_frame_equal - -from io import BytesIO -from pathlib import Path - -from zipfile import ZipFile -from unittest.mock import patch -from geopandas import GeoDataFrame - -import urllib.request -import json - -from pyro_risks import config as cfg -from pyro_risks.datasets import ( - masks, - weather, - wildfires, - utils, - nasa_wildfires, - fwi, - ERA5, - era_fwi_viirs, - queries_api, -) -from pyro_risks.datasets.datasets_mergers import ( - merge_datasets_by_departements, - merge_datasets_by_closest_weather_station, - merge_datasets_by_closest_weather_point, - merge_by_proximity, -) - - -class UtilsTester(unittest.TestCase): - def _test_get_intersection_range(self, s1, s2, expected_len): - date_range = utils.get_intersection_range(s1, s2) - self.assertIsInstance(date_range, pd.DatetimeIndex) - self.assertEqual(len(date_range), expected_len) - - # Template unittest - def test_get_intersection_range(self): - # Non-intersecting series - s1 = pd.Series(pd.date_range("2020-01-01", "2020-08-31")) - s2 = pd.Series(pd.date_range("2020-09-01", "2020-11-01")) - self.assertRaises(ValueError, utils.get_intersection_range, s1, s2) - - # s2 included in s1 - s1 = pd.Series(pd.date_range("2020-01-01", "2020-12-31")) - s2 = pd.Series(pd.date_range("2020-09-01", "2020-09-30")) - self._test_get_intersection_range(s1, s2, 30) - - # s2 included in s1 - s1 = pd.Series(pd.date_range("2020-09-01", "2020-11-01")) - s2 = pd.Series(pd.date_range("2020-10-01", "2020-12-01")) - self._test_get_intersection_range(s1, s2, 32) - - def test_load_data(self): - with tempfile.TemporaryDirectory() as destination: - fwi.load_data(output_path=destination) - self.assertTrue( - Path(destination, "fwi_unzipped/JRC_FWI_20190101.nc").is_file() - ) - - def test_get_fwi_data(self): - with tempfile.TemporaryDirectory() as tmp: - fwi.load_data(output_path=tmp) - df = fwi.get_fwi_data(source_path=tmp) - self.assertIsInstance(df, pd.DataFrame) - self.assertEqual(df.shape, (26538, 11)) - - def test_create_departement_df(self): - test_data = pd.DataFrame( - { - "latitude": { - 0: 47.978, - 1: 46.783, - 2: 43.760, - }, - "longitude": { - 0: 5.132, - 1: 4.710, - 2: 1.335, - }, - "fwi": {0: 6.7, 1: 0.3, 2: 8.9}, - } - ) - res = fwi.create_departement_df(day_data=test_data) - true_res = pd.DataFrame( - { - "latitude": {0: 47.978, 1: 46.783, 2: 43.76}, - "longitude": {0: 5.132, 1: 4.71, 2: 1.335}, - "departement": { - 0: "Haute-Marne", - 1: "Saône-et-Loire", - 2: "Haute-Garonne", - }, - } - ) - assert_frame_equal(res, true_res) - - def test_include_departement(self): - test_row = pd.Series({"latitude": 51.072, "longitude": 2.531, "fwi": 0.0}) - with urllib.request.urlopen(cfg.FR_GEOJSON) as url: - dep_polygons = json.loads(url.read().decode()) - self.assertEqual(fwi.include_department(test_row, dep_polygons), "Nord") - - @patch("pyro_risks.datasets.utils.requests.get") - def test_url_retrieve(self, mock_get): - mock_get.return_value.status_code = 200 - mock_get.return_value.content = bytes("WEATHER OR WILDFIRE FILE", "utf-8") - content = utils.url_retrieve("url") - self.assertIsInstance(content, bytes) - - mock_get.return_value.status_code = 400 - mock_get.return_value.content = bytes("WEATHER OR WILDFIRE FILE", "utf-8") - self.assertRaises( - requests.exceptions.ConnectionError, utils.url_retrieve, "url" - ) - - def test_get_fname(self): - url_firms = 
"https://firms.modaps.eosdis.nasa.gov/data/active_fire/c6/csv/MODIS_C6_Europe_24h.csv" - url_ghcn = "https://www1.ncdc.noaa.gov/pub/data/ghcn/daily/by_year/2020.csv.gz" - url_isd = "https://www.ncei.noaa.gov/data/global-hourly/archive/csv/2020.tar.gz" - - self.assertEqual( - utils.get_fname(url_firms), ("MODIS_C6_Europe_24h", "csv", None) - ) - self.assertEqual(utils.get_fname(url_ghcn), ("2020", "csv", "gz")) - self.assertEqual(utils.get_fname(url_isd), ("2020", None, "tar.gz")) - - @staticmethod - def _mock_csv(destination, fname): - unzipped_content = [ - ["col1", "col2", "col3", "col4"], - ["test", "test", "test", "test"], - ["test", "test", "test", "test"], - ["test", "test", "test", "test"], - ] - - full_path = os.path.join(destination, "server/") - - os.makedirs(os.path.dirname(full_path), exist_ok=True) - - with open(os.path.join(full_path, fname), mode="w") as csvfile: - writer = csv.writer(csvfile) - writer.writerows(unzipped_content) - - def _make_tarfile(self, destination): - self._mock_csv(destination, "test_tar.csv") - - full_path = os.path.join(destination, "server/") - out = tarfile.open(os.path.join(full_path, "test.tar.gz"), "w:gz") - out.add(full_path, arcname=os.path.basename(full_path)) - out.close() - - with open(os.path.join(full_path, "test.tar.gz"), "rb") as tar_file: - memory_file = BytesIO(tar_file.read()) - - return memory_file - - def _make_gzipfile(self, destination): - self._mock_csv(destination, "test_gz.csv") - - full_path = os.path.join(destination, "server/") - with gzip.GzipFile(os.path.join(full_path, "test.gz"), mode="w") as gz, open( - os.path.join(full_path, "test_gz.csv"), mode="r" - ) as csvfile: - gz.write(csvfile.read().encode()) - gz.close() - - with open(os.path.join(full_path, "test.gz"), "rb") as gz_file: - memory_file = BytesIO(gz_file.read()) - - return memory_file - - def _make_zipfile(self, destination): - self._mock_csv(destination, "test_zip.csv") - - full_path = os.path.join(destination, "server/") - with ZipFile(os.path.join(full_path, "test.zip"), "w") as zip_file: - zip_file.write( - os.path.join(full_path, "test_zip.csv"), - os.path.basename(os.path.join(full_path, "test_zip.csv")), - ) - - with open(os.path.join(full_path, "test.zip"), "rb") as zip_file: - memory_file = BytesIO(zip_file.read()) - - return memory_file - - def _make_csv(self, destination): - self._mock_csv(destination, "test_csv.csv") - - full_path = os.path.join(destination, "server/") - with open(os.path.join(full_path, "test_csv.csv"), "rb") as csv_file: - memory_file = BytesIO(csv_file.read()) - - return memory_file - - @staticmethod - def _mock_fname(compression): - if compression == "tar.gz": - return ("test_tar", "csv", "tar.gz") - - elif compression == "zip": - return ("test_zip", "csv", "zip") - - elif compression == "csv": - return ("test_csv", "csv", None) - - else: - return ("test_gz", "csv", "gz") - - @patch("pyro_risks.datasets.utils.get_fname") - @patch("pyro_risks.datasets.utils.url_retrieve") - def test_download(self, mock_url_retrieve, mock_fname): - with tempfile.TemporaryDirectory() as destination: - full_path = os.path.join(destination, "client/") - - mock_fname.return_value = self._mock_fname("tar.gz") - mock_url_retrieve.return_value = self._make_tarfile(destination).read() - utils.download(url="url", default_extension="csv", destination=full_path) - self.assertTrue(Path(full_path, "test_tar.csv").is_file()) - - mock_fname.return_value = self._mock_fname("zip") - mock_url_retrieve.return_value = self._make_zipfile(destination).read() - 
utils.download(url="url", default_extension="csv", destination=full_path) - self.assertTrue(Path(full_path, "test_zip.csv").is_file()) - - mock_fname.return_value = self._mock_fname("gz") - mock_url_retrieve.return_value = self._make_gzipfile(destination).read() - utils.download(url="url", default_extension="csv", destination=full_path) - self.assertTrue(Path(full_path, "test_gz.csv").is_file()) - - mock_fname.return_value = self._mock_fname("csv") - mock_url_retrieve.return_value = self._make_csv(destination).read() - utils.download( - url="url", default_extension="csv", unzip=False, destination=full_path - ) - self.assertTrue(Path(full_path, "test_csv.csv").is_file()) - - mock_fname.return_value = self._mock_fname("gz") - mock_url_retrieve.return_value = self._make_gzipfile(destination).read() - utils.download( - url="url", default_extension="csv", unzip=False, destination=full_path - ) - self.assertTrue(Path(full_path, "test_gz.gz").is_file()) - - mock_fname.return_value = self._mock_fname("csv") - self.assertRaises(ValueError, utils.download, "url", "csv", True, full_path) - # utils.download(url='url', default_extension="csv", unzip=False, destination=full_path) - - def test_get_modis(self): - with tempfile.TemporaryDirectory() as destination: - utils.get_modis( - start_year=2000, end_year=2001, yearly=True, destination=destination - ) - utils.get_modis(destination=destination) - self.assertTrue(Path(destination, "modis_2000_France.csv").is_file()) - self.assertTrue(Path(destination, "MODIS_C6_Europe_24h.csv").is_file()) - - def test_get_ghcn(self): - with tempfile.TemporaryDirectory() as destination: - utils.get_ghcn(start_year=2000, end_year=2001, destination=destination) - self.assertTrue(Path(destination, "2000.csv").is_file()) - - def test_find_closest_weather_station(self): - # Dataframe without STATION column - df = pd.DataFrame( - np.array([[5.876, 23.875], [8.986, 12.978]]), - columns=["LATITUDE", "LONGITUDE"], - ) - self.assertRaises( - ValueError, utils.find_closest_weather_station, df, 3.871, 11.234 - ) - - # Dataframe with STATION column - df = pd.DataFrame( - np.array( - [ - [5676499, 5.876, 23.875], - [4597821, 3.286, 12.978], - [8767822, 8.564, 10.764], - ] - ), - columns=["STATION", "LATITUDE", "LONGITUDE"], - ) - ref_station = utils.find_closest_weather_station(df, 3.871, 11.234) - self.assertIsInstance(ref_station, int) - - def test_merge_datasets_by_departements(self): - df_weather = weather.NOAAWeather() - df_fires = wildfires.BDIFFHistory() - df = merge_datasets_by_departements( - df_weather, "DATE", "code", df_fires, "date", "Département", "left" - ) - self.assertIsInstance(df, pd.DataFrame) - - def test_merge_datasets_by_closest_weather_station(self): - df_weather = weather.NOAAWeather() - nasa_firms = nasa_wildfires.NASAFIRMS() - df = merge_datasets_by_closest_weather_station( - df_weather, "DATE", nasa_firms, "acq_date" - ) - self.assertIsInstance(df, pd.DataFrame) - - def test_merge_datasets_by_closest_weather_point(self): - df_weather = pd.DataFrame( - np.array( - [ - [5.876, 23.875, "2019-06-24"], - [3.286, 12.978, "2019-10-02"], - [8.564, 10.764, "2019-03-12"], - ] - ), - columns=["latitude", "longitude", "time"], - ) - df_weather["latitude"] = df_weather["latitude"].astype(float) - df_weather["longitude"] = df_weather["longitude"].astype(float) - df_weather["time"] = pd.to_datetime( - df_weather["time"], format="%Y-%m-%d", errors="coerce" - ) - nasa_firms = nasa_wildfires.NASAFIRMS() - df = merge_datasets_by_closest_weather_point( - df_weather, "time", 
nasa_firms, "acq_date" - ) - self.assertIsInstance(df, pd.DataFrame) - - def test_merge_datasets_by_proximity(self): - df_weather = pd.DataFrame( - np.array( - [ - [5.876, 23.875, "2019-06-24"], - [3.286, 12.978, "2019-10-02"], - [8.564, 10.764, "2019-03-12"], - ] - ), - columns=["latitude", "longitude", "time"], - ) - df_weather["latitude"] = df_weather["latitude"].astype(float) - df_weather["longitude"] = df_weather["longitude"].astype(float) - df_weather["time"] = pd.to_datetime( - df_weather["time"], format="%Y-%m-%d", errors="coerce" - ) - nasa_firms = nasa_wildfires.NASAFIRMS_VIIRS() - df = merge_by_proximity(nasa_firms, "acq_date", df_weather, "time", "right") - self.assertIsInstance(df, pd.DataFrame) - - -class DatasetsTester(unittest.TestCase): - def test_get_french_geom(self): - fr_geom = masks.get_french_geom() - self.assertIsInstance(fr_geom, GeoDataFrame) - self.assertTrue( - all( - v1 == v2 for v1, v2 in zip(fr_geom.columns, ["code", "nom", "geometry"]) - ) - ) - - def test_noaaweather(self): - ds = weather.NOAAWeather() - self.assertIsInstance(ds, pd.DataFrame) - - def test_bdiffhistory(self): - ds = wildfires.BDIFFHistory() - self.assertIsInstance(ds, pd.DataFrame) - - def test_nasafirms_json(self): - ds = nasa_wildfires.NASAFIRMS() - self.assertIsInstance(ds, pd.DataFrame) - - def test_nasafirms_csv(self): - ds = nasa_wildfires.NASAFIRMS( - source_path=cfg.TEST_FR_FIRMS_CSV_FALLBACK, fmt="csv" - ) - self.assertIsInstance(ds, pd.DataFrame) - - def test_nasafirms_xlsx(self): - ds = nasa_wildfires.NASAFIRMS( - source_path=cfg.TEST_FR_FIRMS_XLSX_FALLBACK, fmt="xlsx" - ) - self.assertIsInstance(ds, pd.DataFrame) - - def test_nasaviirs_csv(self): - ds = nasa_wildfires.NASAFIRMS_VIIRS() - self.assertIsInstance(ds, pd.DataFrame) - - def test_nasaviirs_xlsx(self): - ds = nasa_wildfires.NASAFIRMS_VIIRS( - source_path=cfg.TEST_FR_VIIRS_XLSX_FALLBACK, fmt="xlsx" - ) - self.assertIsInstance(ds, pd.DataFrame) - - def test_nasaviirs_json(self): - ds = nasa_wildfires.NASAFIRMS_VIIRS( - source_path=cfg.TEST_FR_VIIRS_JSON_FALLBACK, fmt="json" - ) - self.assertIsInstance(ds, pd.DataFrame) - - def test_gwisfwi(self): - ds = fwi.GwisFwi() - self.assertIsInstance(ds, pd.DataFrame) - - def test_era5land(self): - ds = ERA5.ERA5Land(source_path=cfg.TEST_FR_ERA5LAND_FALLBACK) - self.assertIsInstance(ds, pd.DataFrame) - - def test_era5t(self): - ds = ERA5.ERA5T(source_path=cfg.TEST_FR_ERA5LAND_FALLBACK) - self.assertIsInstance(ds, pd.DataFrame) - - def test_MergedEraFwiViirs(self): - ds = era_fwi_viirs.MergedEraFwiViirs( - era_source_path=cfg.TEST_FR_ERA5T_FALLBACK, - viirs_source_path=None, - fwi_source_path=cfg.TEST_FWI_FALLBACK, - ) - self.assertIsInstance(ds, pd.DataFrame) - self.assertTrue(len(ds) > 0) - - def test_call_era5land(self): - with tempfile.TemporaryDirectory() as tmp: - queries_api.call_era5land(tmp, "2020", "07", "15") - self.assertTrue(os.path.isfile(os.path.join(tmp, "era5land_2020_07_15.nc"))) - - def test_call_era5t(self): - with tempfile.TemporaryDirectory() as tmp: - queries_api.call_era5t(tmp, "2020", "07", "15") - self.assertTrue(os.path.isfile(os.path.join(tmp, "era5t_2020_07_15.nc"))) - - def test_call_fwi(self): - with tempfile.TemporaryDirectory() as tmp: - queries_api.call_fwi(tmp, "2020", "07", "15") - self.assertTrue(os.path.isfile(os.path.join(tmp, "fwi_2020_07_15.zip"))) - - def test_get_fwi_from_api(self): - res = fwi.get_fwi_from_api("2020-07-15") - self.assertIsInstance(res, pd.DataFrame) - self.assertEqual(len(res), 1039) - self.assertEqual(res.iloc[0]["nom"], 
"Aisne") - self.assertEqual(res.iloc[78]["isi"], np.float32(5.120605)) - - def test_get_fwi_data_for_predict(self): - res = fwi.get_fwi_data_for_predict("2020-05-05") - self.assertTrue( - np.array_equal( - res.day.unique(), - np.array(["2020-05-05", "2020-05-04", "2020-05-02", "2020-04-28"]), - ) - ) - - def test_get_data_era5land_for_predict(self): - res = ERA5.get_data_era5land_for_predict("2020-05-05") - self.assertTrue( - np.array_equal( - res.time.unique(), - np.array( - ["2020-05-05", "2020-05-04", "2020-05-02", "2020-04-28"], - dtype="datetime64[ns]", - ), - ) - ) - self.assertTrue("evaow" in res.columns) - - def test_get_data_era5t_for_predict(self): - res = ERA5.get_data_era5t_for_predict("2020-07-15") - self.assertTrue("u10" in res.columns) - self.assertEqual(len(res), 4156) - - def test_process_dataset_to_predict(self): - fwi = pd.read_csv(cfg.TEST_FWI_TO_PREDICT) - era = pd.read_csv(cfg.TEST_ERA_TO_PREDICT) - res = era_fwi_viirs.process_dataset_to_predict(fwi, era) - self.assertTrue( - np.array_equal( - res.loc[res["nom"] == "Vienne", "fwi_max"].values, - np.array( - [1.2649848, 0.06888488, 0.74846804, 1.6156918], dtype=np.float64 - ), - ) - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_evaluate.py b/test/test_evaluate.py deleted file mode 100644 index 1c6c93d..0000000 --- a/test/test_evaluate.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from collections import namedtuple -from datetime import datetime -from imblearn.pipeline import Pipeline -from sklearn.dummy import DummyClassifier -from sklearn.model_selection import train_test_split -from sklearn.datasets import make_classification -from pyro_risks.pipeline import train_pipeline, save_pipeline -from pyro_risks.pipeline import ( - save_classification_reports, - save_classification_plots, - evaluate_pipeline, -) - - -import numpy as np -import pandas as pd -import pyro_risks.config as cfg - -import unittest -import tempfile -import glob - - -class EvaluateTester(unittest.TestCase): - def test_save_classification_reports(self): - y_true = np.array([0, 0, 1, 1]) - y_pred = np.array([0, 1, 1, 1]) - with tempfile.TemporaryDirectory() as destination: - save_classification_reports( - y_true=y_true, y_pred=y_pred, prefix="TEST", destination=destination - ) - files = glob.glob(destination + "/*") - self.assertTrue(any([".json" in file for file in files])) - self.assertTrue(any([".csv" in file for file in files])) - - def test_save_classification_plots(self): - y_true = np.array([0, 0, 1, 1]) - y_proba = np.array([[0.9, 0.1], [0.6, 0.4], [0.65, 0.35], [0.2, 0.8]]) - with tempfile.TemporaryDirectory() as destination: - save_classification_plots( - y_true=y_true, - y_proba=y_proba[:, 1], - threshold=0.35, - prefix="TEST", - destination=destination, - ) - files = glob.glob(destination + "/*") - self.assertTrue(any([".png" in file for file in files])) - - def test_evaluate_pipeline(self): - X, y = make_classification( - n_samples=100, n_features=5, n_informative=2, n_redundant=2 - ) - X_train, _, y_train, _ = train_test_split( - X, y, test_size=cfg.TEST_SIZE, random_state=cfg.RANDOM_STATE - ) - dummy_pipeline = Pipeline( - [("dummy_classifier", DummyClassifier(strategy="constant", constant=0))] - ) - dummy_pipeline.fit(X_train, y_train) - - with tempfile.TemporaryDirectory() as destination: - threshold = destination + "/DUMMY_threshold.json" - save_pipeline( - 
pipeline=dummy_pipeline, - model="DUMMY", - optimal_threshold=0, - destination=destination, - ) - evaluate_pipeline( - X=X, - y=y, - pipeline=dummy_pipeline, - threshold=threshold, - prefix="DUMMY", - destination=destination, - ) - files = glob.glob(destination + "/*") - self.assertTrue(any([".png" in file for file in files])) - self.assertTrue(any([".json" in file for file in files])) - self.assertTrue(any([".csv" in file for file in files])) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_headers.py b/test/test_headers.py index f8737f1..31e7a8b 100644 --- a/test/test_headers.py +++ b/test/test_headers.py @@ -41,8 +41,7 @@ def test_headers(self): # Compare it self.assertTrue( any( - "".join(current_header[: min(len(option), len(current_header))]) - == "".join(option) + "".join(current_header[: min(len(option), len(current_header))]) == "".join(option) for option in self.headers ), msg=f"Invalid header in {source_path}", diff --git a/test/test_load.py b/test/test_load.py deleted file mode 100644 index 2e2ce2f..0000000 --- a/test/test_load.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -from pyro_risks.pipeline import load_dataset -from unittest import mock - -import pyro_risks.config as cfg -import unittest -import tempfile -import os - - -class LoadTester(unittest.TestCase): - def test_load(self): - with tempfile.TemporaryDirectory() as destination: - with mock.patch("pyro_risks.config.DATA_REGISTRY", destination): - dataset_path = os.path.join(destination, cfg.DATASET) - load_dataset() - self.assertTrue(os.path.isfile(dataset_path)) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_main.py b/test/test_main.py deleted file mode 100644 index 1e8a38a..0000000 --- a/test/test_main.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. 
- -from pyro_risks.pipeline import load_dataset -from pyro_risks.main import main -from pyro_risks.pipeline import train_pipeline -from imblearn.pipeline import Pipeline -from sklearn.dummy import DummyClassifier -from click.testing import CliRunner -import pyro_risks.config as cfg -import requests - -import unittest -import tempfile -import glob -import os - - -class MainTester(unittest.TestCase): - def test_download_dataset(self): - runner = CliRunner() - pattern = "/*.csv" - with tempfile.TemporaryDirectory() as destination: - runner.invoke(main, ["download", "dataset", "--destination", destination]) - files = glob.glob(destination + pattern) - self.assertTrue(any([cfg.DATASET in file for file in files])) - - def test_download_inputs(self): - runner = CliRunner() - pattern = "/*.csv" - with tempfile.TemporaryDirectory() as directory: - runner.invoke( - main, - ["download", "inputs", "--day", "2020-05-05", "--directory", directory], - ) - files = glob.glob(directory + pattern) - self.assertTrue( - any(["inputs_France_2020-05-05.csv" in file for file in files]) - ) - - def test_train_pipeline(self): - runner = CliRunner() - pattern = "/*.joblib" - with tempfile.TemporaryDirectory() as destination: - runner.invoke( - main, ["train", "--model", "RF", "--destination", destination] - ) - files = glob.glob(destination + pattern) - self.assertTrue(any(["RF" in file for file in files])) - - def test_evaluate_pipeline(self): - runner = CliRunner() - pattern = "/*.joblib" - X, y = load_dataset() - - dummy_pipeline = Pipeline( - [("dummy_classifier", DummyClassifier(strategy="constant", constant=0))] - ) - - with tempfile.TemporaryDirectory() as destination: - threshold = destination + "/DUMMY_threshold.json" - train_pipeline( - X=X, - y=y, - model="DUMMY", - pipeline=dummy_pipeline, - destination=destination, - ignore_prints=True, - ignore_html=True, - ) - pipeline_path = glob.glob(destination + pattern) - runner.invoke( - main, - [ - "evaluate", - "--pipeline", - pipeline_path[0], - "--threshold", - threshold, - "--prefix", - "DUMMY", - "--destination", - destination, - ], - ) - files = glob.glob(destination + "/*") - self.assertTrue(any([".png" in file for file in files])) - self.assertTrue(any([".json" in file for file in files])) - self.assertTrue(any([".csv" in file for file in files])) - - def test_predict(self): - # TODO - # Test with today date after bugfix - inputs_fname = "inputs_France_2020-05-05.csv" - pipeline_fname = "RF.joblib" - mock_inputs = requests.get( - url="https://github.com/pyronear/pyro-risks/releases/download/v0.1.0-data/inputs_France_2020-05-05.csv" - ) - mock_pipeline = requests.get( - url="https://github.com/pyronear/pyro-risks/releases/download/v0.1.0-data/RF.joblib" - ) - - runner = CliRunner() - with tempfile.TemporaryDirectory() as directory: - with open(os.path.join(directory, inputs_fname), "wb") as inputs: - inputs.write(mock_inputs.content) - - with open(os.path.join(directory, pipeline_fname), "wb") as pipeline: - pipeline.write(mock_pipeline.content) - runner.invoke( - main, ["predict", "--day", "2020-05-05", "--directory", directory] - ) - - files = glob.glob(directory + "/*") - print(files) - self.assertTrue( - any(["inputs_France_2020-05-05.csv" in file for file in files]) - ) - self.assertTrue( - any( - [ - "RF_predictions_France_2020-05-05.joblib" in file - for file in files - ] - ) - ) - self.assertTrue(any(["RF.joblib" in file for file in files])) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_models_transformers.py 
b/test/test_models_transformers.py deleted file mode 100644 index fb345d7..0000000 --- a/test/test_models_transformers.py +++ /dev/null @@ -1,234 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import unittest -import numpy as np -import pandas as pd - -from pandas.testing import assert_frame_equal, assert_series_equal - -from pyro_risks.models import ( - TargetDiscretizer, - CategorySelector, - Imputer, - LagTransformer, - FeatureSelector, - FeatureSubsetter, -) - - -class TransformersTester(unittest.TestCase): - def test_target_discretizer(self): - td = TargetDiscretizer(discretizer=lambda x: 1 if x > 0 else 0) - df = pd.DataFrame( - { - "day": ["2019-07-01", "2019-08-02", "2019-06-12"], - "departement": ["Aisne", "Cantal", "Savoie"], - "fires": [0, 5, 10], - "fwi_mean": [13.3, 0.9, 2.5], - "ffmc_max": [23, 45.3, 109.0], - } - ) - X = df.drop(columns=["fires"]) - y = df["fires"] - - Xr, yr = td.fit_resample(X, y) - assert_series_equal(yr, pd.Series([0, 1, 1], name="fires")) - assert_frame_equal(Xr, X) - self.assertRaises(TypeError, TargetDiscretizer, [0, 1]) - - def test_category_selector(self): - cs = CategorySelector(variable="departement", category=["Aisne", "Cantal"]) - df = pd.DataFrame( - { - "day": ["2019-07-01", "2019-08-02", "2019-06-12"], - "departement": ["Aisne", "Cantal", "Savoie"], - "fires": [0, 5, 10], - "fwi_mean": [13.3, 0.9, 2.5], - "ffmc_max": [23, 45.3, 109.0], - } - ) - X = df.drop(columns=["fires"]) - y = df["fires"] - - Xr, yr = cs.fit_resample(X, y) - - self.assertRaises(TypeError, CategorySelector, "departement", 0) - assert_frame_equal(Xr, X[X["departement"].isin(["Aisne", "Cantal"])]) - assert_series_equal(yr, y[X["departement"].isin(["Aisne", "Cantal"])]) - - # pylint: disable=R0201 - def test_imputer(self): - imp = Imputer(strategy="median", columns=["fwi_mean"]) - df = pd.DataFrame( - { - "fires": [0, 5, 10], - "fwi_mean": [13.3, np.nan, 2.5], - "ffmc_max": [23, np.nan, 109.0], - } - ) - - X = df.drop(columns=["fires"]) - y = df["fires"] - - imp.fit(X, y) - - XT = imp.transform(X) - - assert_frame_equal( - XT, - pd.DataFrame( - { - "fwi_mean": [13.3, 7.9, 2.5], - "ffmc_max": [23, np.nan, 109.0], - } - ), - ) - - def test_lag_transformer(self): - lt = LagTransformer( - date_column="date", zone_column="departement", columns=["fwi_mean"] - ) - df = pd.DataFrame( - { - "date": [ - np.datetime64("2019-07-01"), - np.datetime64("2019-07-04"), - np.datetime64("2019-07-07"), - np.datetime64("2019-07-08"), - ], - "departement": ["Cantal", "Cantal", "Cantal", "Cantal"], - "fwi_mean": [1.1, 13.3, 0.9, 2.5], - "fires": [0, 5, 10, 10], - } - ) - res = pd.DataFrame( - { - "date": [ - np.datetime64("2019-07-01"), - np.datetime64("2019-07-04"), - np.datetime64("2019-07-07"), - np.datetime64("2019-07-08"), - ], - "departement": ["Cantal", "Cantal", "Cantal", "Cantal"], - "fwi_mean": [1.1, 13.3, 0.9, 2.5], - "fwi_mean_lag1": [np.nan, np.nan, np.nan, 0.9], - "fwi_mean_lag3": [np.nan, 1.1, 13.3, np.nan], - "fwi_mean_lag7": [np.nan, np.nan, np.nan, 1.1], - } - ) - - X = df.drop(columns=["fires"]) - y = df["fires"] - - lt.fit(X, y) - - X = lt.transform(X) - - pd.DataFrame( - { - "day": ["2019-07-01", "2019-08-02", "2019-06-12"], - "departement": ["Aisne", "Cantal", "Savoie"], - "fwi_mean": [13.3, 0.9, 2.5], - "ffmc_max": [23, 45.3, 109.0], - } - ) - - assert_frame_equal(res, X) - self.assertRaises( - TypeError, - LagTransformer.transform, - pd.DataFrame( - { - 
"day": ["2019-07-01", "2019-08-02", "2019-06-12"], - "departement": ["Aisne", "Cantal", "Savoie"], - "fwi_mean": [13.3, 0.9, 2.5], - "ffmc_max": [23, 45.3, 109.0], - } - ), - ) - - # pylint: disable=R0201 - def test_feature_selector(self): - fs = FeatureSelector( - exclude=["date", "department"], method="pearson", threshold=0.15 - ) - df = pd.DataFrame( - { - "date": [ - np.datetime64("2019-07-01"), - np.datetime64("2019-07-04"), - np.datetime64("2019-07-06"), - np.datetime64("2019-07-07"), - np.datetime64("2019-07-08"), - ], - "departement": ["Cantal", "Cantal", "Cantal", "Cantal", "Cantal"], - "str_mean": [2, 3, 4, 0, 0], - "ffmc_min": [0, 0, 0, 0, 0], - "isi_mean": [3, 0, 1, 4, 5], - "fires": [1, 1, 1, 0, 0], - } - ) - - X = df.drop(columns=["fires"]) - y = df["fires"] - - fs.fit(X, y) - X = fs.transform(X) - - res = pd.DataFrame( - { - "str_mean": [2, 3, 4, 0, 0], - "isi_mean": [3, 0, 1, 4, 5], - } - ) - - assert_frame_equal(res, X) - - # pylint: disable=R0201 - def test_feature_subsetter(self): - fs = FeatureSubsetter(columns=["date", "departement", "str_mean"]) - df = pd.DataFrame( - { - "date": [ - np.datetime64("2019-07-01"), - np.datetime64("2019-07-04"), - np.datetime64("2019-07-06"), - np.datetime64("2019-07-07"), - np.datetime64("2019-07-08"), - ], - "departement": ["Cantal", "Cantal", "Cantal", "Cantal", "Cantal"], - "str_mean": [2, 3, 4, 0, 0], - "ffmc_min": [0, 0, 0, 0, 0], - "isi_mean": [3, 0, 1, 4, 5], - "fires": [1, 1, 1, 0, 0], - } - ) - - X = df.drop(columns=["fires"]) - y = df["fires"] - - fs.fit(X, y) - X = fs.transform(X) - - res = pd.DataFrame( - { - "date": [ - np.datetime64("2019-07-01"), - np.datetime64("2019-07-04"), - np.datetime64("2019-07-06"), - np.datetime64("2019-07-07"), - np.datetime64("2019-07-08"), - ], - "departement": ["Cantal", "Cantal", "Cantal", "Cantal", "Cantal"], - "str_mean": [2, 3, 4, 0, 0], - } - ) - - assert_frame_equal(res, X) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_models_utils.py b/test/test_models_utils.py deleted file mode 100644 index a524e99..0000000 --- a/test/test_models_utils.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import unittest - -import numpy as np -from pyro_risks.models import check_xy, check_x, discretizer - - -class UtilsTester(unittest.TestCase): - def test_check_xy(self): - self.assertRaises( - TypeError, check_xy, np.array([[0, 0, 0], [0, 0, 0]]), np.array([0, 1]) - ) - - def test_check_x(self): - self.assertRaises( - TypeError, check_x, np.array([[0, 0, 0], [0, 0, 0]]), np.array([0, 1]) - ) - - def test_discretizer(self): - self.assertEqual(discretizer(5), 1) - self.assertEqual(discretizer(0), 0) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_predict.py b/test/test_predict.py deleted file mode 100644 index 8d342da..0000000 --- a/test/test_predict.py +++ /dev/null @@ -1,168 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. 
- -from pyro_risks.pipeline import PyroRisk -from pyro_risks import config as cfg - -import pandas as pd - -import requests -import imblearn -import unittest -import tempfile -import glob -import os - - -class PredictTester(unittest.TestCase): - def test_pyrorisk(self): - pyrorisk_rf = PyroRisk() - pyrorisk_xgb = PyroRisk(model="XGBOOST") - self.assertEqual(pyrorisk_rf.model, "RF") - self.assertEqual(pyrorisk_xgb.model, "XGBOOST") - self.assertEqual(pyrorisk_rf.model_path, cfg.RFMODEL_ERA5T_PATH) - self.assertEqual(pyrorisk_xgb.model_path, cfg.XGBMODEL_ERA5T_PATH) - self.assertEqual(pyrorisk_rf.predictions_registry, cfg.PREDICTIONS_REGISTRY) - self.assertEqual(pyrorisk_xgb.predictions_registry, cfg.PREDICTIONS_REGISTRY) - with self.assertRaises(ValueError): - PyroRisk(model="`Mock`") - - def test_get_pipeline(self): - pyrorisk = PyroRisk() - with tempfile.TemporaryDirectory() as dir_destination: - destination = f"{dir_destination}/RF.joblib" - pyrorisk.get_pipeline(destination=destination) - files = glob.glob(dir_destination + "/*") - self.assertTrue(any(["RF.joblib" in file for file in files])) - - def test_get_inputs(self): - pyrorisk = PyroRisk() - country = "France" - day = "2020-05-05" - with tempfile.TemporaryDirectory() as dir_destination: - pyrorisk.get_inputs( - day=day, country=country, dir_destination=dir_destination - ) - files = glob.glob(dir_destination + "/*") - self.assertTrue( - any([f"inputs_{country}_{day}.csv" in file for file in files]) - ) - - def test_load_pipeline(self): - pyrorisk = PyroRisk() - with tempfile.TemporaryDirectory() as dir_path: - path = dir_path + "/RF.joblib" - pyrorisk.load_pipeline(path=path) - files = glob.glob(dir_path + "/*") - self.assertTrue(isinstance(pyrorisk.pipeline, imblearn.pipeline.Pipeline)) - self.assertTrue(any(["RF.joblib" in file for file in files])) - pyrorisk.pipeline = None - pyrorisk.load_pipeline(path=path) - self.assertTrue(isinstance(pyrorisk.pipeline, imblearn.pipeline.Pipeline)) - - def test_load_inputs(self): - pyrorisk = PyroRisk() - country = "France" - day = "2020-05-05" - with tempfile.TemporaryDirectory() as dir_path: - pyrorisk.load_inputs(day=day, country=country, dir_path=dir_path) - files = glob.glob(dir_path + "/*") - self.assertTrue(isinstance(pyrorisk.inputs, pd.DataFrame)) - self.assertTrue( - any([f"inputs_{country}_{day}.csv" in file for file in files]) - ) - pyrorisk.inputs = None - pyrorisk.load_inputs(day=day, country=country, dir_path=dir_path) - self.assertTrue(isinstance(pyrorisk.inputs, pd.DataFrame)) - - def test_predict(self): - pyrorisk_rf = PyroRisk() - pyrorisk_xgb = PyroRisk(model="XGBOOST") - country = "France" - day = "2020-05-05" - inputs_fname = "inputs_France_2020-05-05.csv" - rf_pipeline_fname = "RF.joblib" - xgb_pipeline_fname = "XGBOOST.joblib" - mock_inputs = requests.get( - url="https://github.com/pyronear/pyro-risks/releases/download/v0.1.0-data/inputs_France_2020-05-05.csv" - ) - mock_rf_pipeline = requests.get( - url="https://github.com/pyronear/pyro-risks/releases/download/v0.1.0-data/RF.joblib" - ) - mock_xgb_pipeline = requests.get( - url="https://github.com/pyronear/pyro-risks/releases/download/v0.1.0-data/RF.joblib" - ) - - with tempfile.TemporaryDirectory() as dir_destination: - with open(os.path.join(dir_destination, inputs_fname), "wb") as inputs: - inputs.write(mock_inputs.content) - - with open( - os.path.join(dir_destination, rf_pipeline_fname), "wb" - ) as pipeline: - pipeline.write(mock_rf_pipeline.content) - with open( - os.path.join(dir_destination, 
xgb_pipeline_fname), "wb" - ) as pipeline: - pipeline.write(mock_xgb_pipeline.content) - pyrorisk_rf.predict( - day=day, country=country, dir_destination=dir_destination - ) - pyrorisk_xgb.predict( - day=day, country=country, dir_destination=dir_destination - ) - files = glob.glob(dir_destination + "/*") - self.assertTrue( - any( - [ - f"{pyrorisk_rf.model}_predictions_{country}_{day}.joblib" - in file - for file in files - ] - ) - ) - self.assertTrue( - any( - [ - f"{pyrorisk_xgb.model}_predictions_{country}_{day}.joblib" - in file - for file in files - ] - ) - ) - - def test_get_predictions(self): - pyrorisk = PyroRisk() - country = "France" - day = "2020-05-05" - with tempfile.TemporaryDirectory() as destination: - pyrorisk.get_predictions(day=day, dir_destination=destination) - files = glob.glob(destination + "/*") - self.assertTrue( - any( - [ - f"{pyrorisk.model}_predictions_{country}_{day}.joblib" in file - for file in files - ] - ) - ) - - def test_expose_predictions(self): - pyrorisk = PyroRisk() - day = "2020-05-05" - with tempfile.TemporaryDirectory() as destination: - predictions_dict = pyrorisk.expose_predictions( - day=day, dir_destination=destination - ) - predictions_load_dict = pyrorisk.expose_predictions( - day=day, dir_destination=destination - ) - - self.assertTrue(isinstance(predictions_dict, dict)) - self.assertTrue(isinstance(predictions_load_dict, dict)) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_train.py b/test/test_train.py deleted file mode 100644 index 239930e..0000000 --- a/test/test_train.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright (C) 2021-2022, Pyronear. - -# This program is licensed under the Apache License version 2. -# See LICENSE or go to for full license details. - -import unittest -from collections import namedtuple -import tempfile -import glob - - -import numpy as np -import pandas as pd -import pyro_risks.config as cfg - -from datetime import datetime -from imblearn.pipeline import Pipeline -from sklearn.dummy import DummyClassifier -from pyro_risks.models import xgb_pipeline, rf_pipeline -from pyro_risks.pipeline import ( - calibrate_pipeline, - save_pipeline, - train_pipeline, - load_dataset, -) - - -class TrainTester(unittest.TestCase): - def test_calibrate_pipeline(self): - y_true = np.array([0, 0, 1, 1]) - y_scores = np.array([[0.9, 0.1], [0.6, 0.4], [0.65, 0.35], [0.2, 0.8]]) - optimal_threshold = calibrate_pipeline(y_true, y_scores) - self.assertEqual(optimal_threshold, 0.35) - - def test_save_pipeline(self): - y_true = np.array([0, 0, 1, 1]) - y_scores = np.array([[0.9, 0.1], [0.6, 0.4], [0.65, 0.35], [0.2, 0.8]]) - optimal_threshold = calibrate_pipeline(y_true, y_scores) - model_pattern = "/*.joblib" - html_pattern = "/*.html" - registry = "/.model_registry" - - with tempfile.TemporaryDirectory() as destination: - save_pipeline( - pipeline=xgb_pipeline, - model="RF", - optimal_threshold=optimal_threshold, - destination=destination, - ignore_html=True, - ) - save_pipeline( - pipeline=rf_pipeline, - model="RF", - optimal_threshold=optimal_threshold, - destination=destination, - ignore_html=False, - ) - model_files = glob.glob(destination + model_pattern) - html_files = glob.glob(destination + html_pattern) - self.assertTrue(any(["RF" in file for file in model_files])) - self.assertTrue(any(["RF" in file for file in html_files])) - - with tempfile.TemporaryDirectory() as destination: - save_pipeline( - pipeline=xgb_pipeline, - model="XGBOOST", - optimal_threshold=optimal_threshold, - destination=destination + 
registry, - ignore_html=True, - ) - save_pipeline( - pipeline=rf_pipeline, - model="XGBOOST", - optimal_threshold=optimal_threshold, - destination=destination + registry, - ignore_html=False, - ) - model_files = glob.glob(destination + registry + model_pattern) - html_files = glob.glob(destination + registry + html_pattern) - self.assertTrue(any(["XGBOOST" in file for file in model_files])) - self.assertTrue(any(["XGBOOST" in file for file in html_files])) - - def test_train_pipeline(self): - X, y = load_dataset() - pattern = "/*.joblib" - - dummy_pipeline = Pipeline( - [("dummy_classifier", DummyClassifier(strategy="constant", constant=0))] - ) - with tempfile.TemporaryDirectory() as destination: - train_pipeline( - X=X, - y=y, - model="XGBOOST", - destination=destination, - ignore_prints=True, - ignore_html=True, - ) - train_pipeline( - X=X, - y=y, - model="RF", - destination=destination, - ignore_prints=True, - ignore_html=True, - ) - train_pipeline( - X=X, - y=y, - model="DUMMY", - pipeline=dummy_pipeline, - destination=destination, - ignore_prints=True, - ignore_html=True, - ) - files = glob.glob(destination + pattern) - self.assertTrue(any(["RF" in file for file in files])) - self.assertTrue(any(["XGBOOST" in file for file in files])) - self.assertTrue(any(["DUMMY" in file for file in files])) - - -if __name__ == "__main__": - unittest.main()
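For completeness, a hedged round trip through the renamed `pyrorisks.utils.s3.S3Bucket` wrapper changed earlier in this diff, using only methods shown in the s3.py hunks (`write_json_to_s3`, `read_json_from_s3`, `get_files_metadata`). The constructor arguments are assumptions inferred from the class docstring example and the `read_credentials` helper; every credential value is a placeholder:

```python
# Sketch, assuming S3Bucket accepts the connection parameters suggested by its
# docstring example and read_credentials helper; all values are placeholders.
from pyrorisks.utils.s3 import S3Bucket

s3 = S3Bucket(
    bucket_name="mybucket",                           # placeholder
    endpoint_url="https://s3.gra.io.cloud.ovh.net/",  # OVH endpoint built by read_credentials
    region_name="gra",
    aws_access_key_id="...",                          # placeholder
    aws_secret_access_key="...",                      # placeholder
)

# JSON round trip: put_object with UTF-8 encoded bytes, then json.loads on read.
payload = {"type": "FeatureCollection", "features": []}
s3.write_json_to_s3(json_data=payload, object_key="fwi/2020-05-05/fwi.geojson")
assert s3.read_json_from_s3(object_key="fwi/2020-05-05/fwi.geojson") == payload

# get_files_metadata reports file_size in kilobytes (obj.size / 1024),
# matching the docstring fix above.
for meta in s3.get_files_metadata(patterns=["fwi"], prefix="fwi/"):
    print(meta["file_name"], meta["file_size"], meta["file_last_modified"])
```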