diff --git a/.github/workflows/publish-quarto.yml b/.github/workflows/publish-quarto.yml index ea3c14e..11f40fe 100644 --- a/.github/workflows/publish-quarto.yml +++ b/.github/workflows/publish-quarto.yml @@ -1,46 +1,39 @@ -name: Quarto Publish - -on: [push] - -jobs: - build-deploy: - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - name: Check out repository - uses: actions/checkout@v4 - - - name: Install system dependencies - run : | - sudo apt-get update - sudo apt-get upgrade - sudo apt-get install libudunits2-dev libgdal-dev libcurl4-openssl-dev - - - name: Set up python - id: setup-python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - virtualenvs-create: true - virtualenvs-in-project: true - installer-parallel: true - - - name: Install the project - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' - run: | - poetry install --no-interaction --no-root - - - name: Set up Quarto - uses: quarto-dev/quarto-actions/setup@v2 - - - name: Render and Publish - uses: quarto-dev/quarto-actions/publish@v2 - with: - target: gh-pages - env: +name: Quarto Publish + +on: [push] + +jobs: + build-deploy: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get upgrade -y + sudo apt-get install -y libudunits2-dev libgdal-dev libcurl4-openssl-dev + + - name: Set up python + id: setup-python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install --upgrade pip toml + pip install -r <(python -c 'import toml; print("\n".join(toml.load("pyproject.toml")["project"]["dependencies"]))') + + - name: Set up Quarto + uses: quarto-dev/quarto-actions/setup@v2 + + - name: Render and Publish + uses: quarto-dev/quarto-actions/publish@v2 + with: + target: gh-pages + env: 
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 22f61d5..9bef6b1 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,59 +1,42 @@ -name: Pytest - -on: [push] - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [3.9, '3.10', 3.11, 3.12] - - steps: - - name: Check out repository - uses: actions/checkout@v4 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Install system dependencies - run: | - sudo apt-get update -y - sudo apt-get install gdal-bin libproj-dev libgdal-dev proj-bin - - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - virtualenvs-create: true - virtualenvs-in-project: true - installer-parallel: true - - - name: Install package dependencies - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' - run: poetry install --no-interaction --no-root - - - name: Install library - run: poetry install --no-interaction - - # - name: Lint with flake8 - # run: | - # source .venv/bin/activate - # # stop the build if there are Python syntax errors or undefined names - # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - - name: Test with pytest - run: | - poetry run pytest --cov-report=xml --cov=. 
- - - name: "Upload Report to Codecov" - uses: codecov/codecov-action@v4.2.0 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - with: - file: ./coverage.xml +name: Pytest + +on: [push] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.9, '3.10', 3.11, 3.12] + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install system dependencies + run: | + sudo apt-get update -y + sudo apt-get install -y gdal-bin libproj-dev libgdal-dev proj-bin + + - name: Install dependencies + run: | + pip install --upgrade pip toml pytest pytest-cov + pip install -r <(python -c 'import toml; print("\n".join(toml.load("pyproject.toml")["project"]["dependencies"]))') + + - name: Test with pytest + run: | + pytest --cov-report=xml --cov=. + + - name: "Upload Report to Codecov" + uses: codecov/codecov-action@v4.2.0 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + with: + file: ./coverage.xml fail_ci_if_error: true \ No newline at end of file diff --git a/.gitignore b/.gitignore index e44e70e..0fb4366 100644 --- a/.gitignore +++ b/.gitignore @@ -1,15 +1,16 @@ -.DS_Store -.eggs/ -.idea/ -.vscode/ -.pytest_cache/ -*__pycache__/ -build/ -*egg* -*whl -*.tar.gz -**/*.ipynb_checkpoints/ - -/.quarto/ -_site/ -site/ +.DS_Store +.eggs/ +.idea/ +.vscode/ +.pytest_cache/ +*__pycache__/ +build/ +*egg* +*whl +*.tar.gz +**/*.ipynb_checkpoints/ +.conda/ + +/.quarto/ +_site/ +site/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a92ee2b..6cab15f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,16 +1,16 @@ -repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: check-added-large-files - - id: double-quote-string-fixer - -- repo: 
https://github.com/psf/black-pre-commit-mirror - rev: 24.4.2 - hooks: - - id: black - name: black (python) - language_version: python3.11 +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - id: double-quote-string-fixer + +- repo: https://github.com/psf/black-pre-commit-mirror + rev: 24.4.2 + hooks: + - id: black + name: black (python) + language_version: python3.11 diff --git a/.python-version b/.python-version index 2419ad5..a9d3e48 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.11.9 +3.11.9 diff --git a/LICENSE b/LICENSE index 9cecc1d..c65825e 100644 --- a/LICENSE +++ b/LICENSE @@ -1,674 +1,674 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. 
Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. 
If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. 
Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. 
- - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. 
Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. 
- - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. 
- - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. 
If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. 
- - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. 
- - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the 
material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. 
If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. 
- - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. 
The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. 
- - {one line to give the program's name and a brief idea of what it does.} - Copyright (C) {year} {name of author} - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - {project} Copyright (C) {year} {fullname} - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. 
+ GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. 
+ + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. 
+ + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. 
Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. 
If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. 
For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + {one line to give the program's name and a brief idea of what it does.} + Copyright (C) {year} {name of author} + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + {project} Copyright (C) {year} {fullname} + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. 
diff --git a/README.md b/README.md index ef18404..284baaf 100644 --- a/README.md +++ b/README.md @@ -1,433 +1,433 @@ -[![Codecov test coverage](https://codecov.io/gh/stevenpawley/pyspatialml/branch/master/graph/badge.svg)](https://codecov.io/gh/stevenpawley/pyspatialml?branch=master) -![Pytest](https://github.com/stevenpawley/pyspatialml/actions/workflows/tests.yml/badge.svg) - -# Pyspatialml -Machine learning classification and regression modelling for spatial raster data. - -## Description -`Pyspatialml` is a Python module for applying scikit-learn machine learning models to 'stacks' of raster datasets. -Pyspatialml includes functions and classes for working with multiple raster datasets and performing a typical machine -learning workflow consisting of extracting training data and applying the predict or predict_proba methods of -scikit-learn estimators to a stack of raster datasets. Pyspatialml is built upon the `rasterio` Python module for -all of the heavy lifting, and is also designed for working with vector data using the `geopandas` module. - -For more information read the documents page at: https://stevenpawley.github.io/Pyspatialml/ - -## Rationale -A typical supervised machine-learning workflow involving raster datasets consists of several steps: - -1. Using vector features or labelled pixels to extract training data from a stack of raster-based predictors (e.g. -spectral bands, terrain derivatives, or climate grids). The training data represent locations when some -property/state/concentration is already established, and might comprise point locations of arsenic concentrations, or -labelled pixels with integer-encoded values that correspond to known landcover types. - -2. Developing a machine learning classification or regression model on the training data. Pyspatialml is designed to use -scikit-learn compatible api's for this purpose. - -3. Applying the fitted machine learning model to make predictions on all of the pixels in the stack of raster data. 
- -Pyspatialml is designed to make it easy to develop spatial prediction models on stacks of 2D raster datasets that are -held on disk. Unlike using python's `numpy` module directly where raster datasets need to be held in memory, -the majority of functions within pyspatialml work with raster datasets that are stored on disk and allow processing -operations to be performed on datasets that are too large to be loaded into memory. - -## Design - -### Raster and RasterLayer objects - -The main class that facilitates working with multiple raster datasets is the `Raster` class, which is inspired by -the famous ```raster``` package in the R statistical programming language. The `Raster` object takes a list of file -paths to GDAL-supported raster datasets and 'stacks' them into a single Raster object. The underlying file-based raster -datasets are not physically-stacked, but rather the Raster object internally represents each band within the datasets as -a `RasterLayer`. This means that metadata regarding what each raster dataset represents (e.g. the dataset's name) -can be retained, and additional raster datasets can be added or removed from the stack without physical on disk changes. - -Note these raster datasets need to be spatially aligned in terms of their extent, resolution and coordinate reference -system. - -### Usage - -There are four methods of creating a new Raster object: - -1. `Raster([raster1.tif, raster2.tif, raster3.tif])` creates a Raster object from existing file-based -GDAL-supported datasets. - -2. `Raster(new_numpy_array, crs=crs, transform=transform)` creates a Raster object from a 3D numpy array (band, -row, column). The `crs` and `transform` arguments are optional but are required to provide coordinate reference -system information to the Raster object. The crs argument has to be represented by `rasterio.crs.CRS` object, and -the transform parameter requires a `affine.Affine` object. - -3. 
`Raster([RasterLayer1, RasterLayer2, RasterLayer3])` creates a Raster object from a single or list of -RasterLayer objects. RasterLayers are a thin wrapper around rasterio.Band objects with additional methods. This is -mostly used internally. A RasterLayer itself is initiated directly from a rasterio.Band object. - -4. `Raster([src0, src1, src2])` where the list elements are `rasterio.io.datasetreader` objects, i.e. raster -datasets that have been opened using the `rasterio.open` method. - -Generally, pyspatialml intends users to work with the Raster object. However, access to individual RasterLayer -objects, or the underlying rasterio.band datasets can be useful if pyspatialml is being used in conjunction with other -functions and methods in the Rasterio package. - -## Installation - -The package is now available on PyPI, but can also be installed from GitHub directly via: - -``` -pip install git+https://github.com/stevenpawley/Pyspatialml -``` - -## Quickstart - -This is an example using the imagery data that is bundled with the package. This data is derived from the GRASS GIS -North Carolina dataset and comprises Landsat 7 VNIR and SWIR bands along with some land cover training data that were -derived from a land cover classification from an earlier date. - -First, import the extract and predict functions: - -``` -from pyspatialml import Raster -from copy import deepcopy -import os -import tempfile -import geopandas -import rasterio.plot -import matplotlib.pyplot as plt -``` - -### Stacking - -We are going to use a set of Landsat 7 bands contained within the nc_dataset: - -``` -import pyspatialml.datasets.nc as nc -predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] -``` - -These raster datasets are aligned in terms of their extent and coordinate reference systems. 
We can 'stack' these into a -Raster class so that we can perform machine learning related operations on the set of rasters: - -``` -stack = Raster(predictors) -``` - -Upon 'stacking', syntactically-correct names for each RasterLayer are automatically generated from the file_paths. - -### Indexing - -Indexing of Raster objects is provided by several methods: - -- Raster[keys] : subsets a Raster using key-based indexing based on the names of the RasterLayers. If a single key is -supplied then a RasterLayer is returned, otherwise a Raster object is returned contained the subsetted layers. -- Raster.iloc[int, list, tuple, slice] : subsets a Raster using integer-based indexing or slicing. If a single key is -supplied then a RasterLayer is returned, otherwise a Raster object is returned contained the subsetted layers. -- Raster.name : attribute names can be used directly, and always returns a single RasterLayer object. - -RasterLayer indexing which returns a RasterLayer: - -``` -# index by integer position -rasterlayer = stack.iloc[0] -rasterstack = stack.iloc[0:3] - -# index by name -rasterlayer = stack['lsat7_2000_10'] -rasterstack = stack[('lsat7_2000_10', 'lsat7_2000_20')] - -# index by atttribute -rasterlayer = stack.lsat7_2000_10 -``` - -Iterate through RasterLayers: - -``` -for name, layer in stack: - print(layer) -``` - -Subset a Raster object: - -``` -subset_raster = stack[['lsat7_2000_10', 'lsat7_2000_70']] -subset_raster.names -``` - -Replace a RasterLayer with another: - -``` -stack.iloc[0] = Raster(nc.band7).iloc[0] -``` - -Append layers from another Raster to the stack. Note that this occurs in-place. 
Duplicate names are automatically given -a suffix: - -``` -stack.append(Raster(nc.band7)) -stack.names -``` - -Rename RasterLayers using a dict of old_name : new_name pairs: - -``` -stack.names -stack.rename({'lsat7_2000_30': 'new_name'}, in_place=True) -stack.names -stack.new_name -stack['new_name'] -stack.loc['new_name'] -``` - -We can also change all of the column names by replacing them: - -``` -stack.names = ["band1", "band2", "band3", "band4", "band5", "band7"] -``` - -Drop a RasterLayer: - -``` -stack.names -stack.drop(labels='band1', in_place=True) -stack.names -``` - -Save a Raster: - -``` -tmp_tif = tempfile.NamedTemporaryFile().name + '.tif' -newstack = stack.write(file_path=tmp_tif, nodata=-9999) -newstack.band2.read() -``` - -### Plotting - -Basic plotting has been added to as a method to RasterLayer and Raster options. More controls on plotting will be added -in the future. Currently you can set a matplotlib cmap for each RasterLayer using the RasterLayer.cmap attribute. - -Plot a single RasterLayer: - -``` -from matplotlib.colors import Normalize - -stack = Raster(predictors) -stack.lsat7_2000_10.cmap = 'plasma' -stack.lsat7_2000_10.norm = Normalize(20, 210) -stack.lsat7_2000_10.plot() -``` - -Plot all RasterLayers in a Raster object: - -``` -stack.plot() -``` - -### Integration with Pandas - -Data from a Raster object can be converted into a Pandas dataframe, with each pixel representing by a row, and columns -reflecting the x, y coordinates and the values of each RasterLayer in the Raster object: - -``` -df = stack.to_pandas(max_pixels=50000, resampling='nearest') -df.head() -``` - -The original raster is up-sampled based on max_pixels and the resampling method, which uses all of resampling methods -available in the underlying rasterio library for decimated reads. 
The Raster.to_pandas method can be useful for plotting -datasets, or combining with a library such as plotnine to create ggplot2-style plots of stacks of RasterLayers: - -``` -from plotnine import * -(ggplot(df.melt(id_vars=['x', 'y']), aes(x='x', y='y', fill='value')) + -geom_tile() + facet_wrap('variable')) -``` - -## A Machine Learning Workflow - -### Extract Training Data - -Load some training data in the form of polygons, points and labelled pixels in geopandas GeoDataFrame objects. We will -also generate some line geometries by converting the polygon boundaries into linestrings. All of these geometry types -can be used to spatially query pixel values in a Raster object, however each GeoDataFrame must contain only one type of -geometry (i.e. either shapely points, polygons or linestrings). - -``` -training_py = geopandas.read_file(nc.polygons) -training_pt = geopandas.read_file(nc.points) -training_px = rasterio.open(nc.labelled_pixels) -training_lines = deepcopy(training_py) -training_lines['geometry'] = training_lines.geometry.boundary -``` - -Show training data points and a single raster band using numpy and matplotlib: - -``` -stack = Raster(predictors) -plt.imshow(stack.lsat7_2000_70.read(masked=True), - extent=rasterio.plot.plotting_extent(stack.lsat7_2000_70)) -plt.scatter(x=training_pt.bounds.iloc[:, 0], - y=training_pt.bounds.iloc[:, 1], - s=2, color='black') -plt.show() -``` - -Pixel values in the Raster object can be spatially queried using the `extract_vector` and `extract_raster` -methods. In addition, the `extract_xy` method can be used to query pixel values using a 2d array of x and y -coordinates. - -The `extract_vector` method accepts a Geopandas GeoDataFrame as the `gdf` argument. For -GeoDataFrames containing shapely point geometries, the closest pixel to each point is sampled. For shapely polygon -geometries, all pixels whose centres are inside the polygon are sampled. 
For shapely linestring geometries, every pixel -touched by the line is sampled. For all geometry types, pixel values are queries for each geometry separately. This -means that overlapping polygons or points that fall within the same pixel with cause the same pixel to be sampled -multiple times. - -By default, the extract functions return a Geopandas GeoDataFrame of point geometries and the DataFrame containing the -extracted pixels, with the column names set by the names of the raster datasets in the Raster object. The user can also -use the `return_array=True` argument, which instead of returning a DataFrame will return three masked numpy arrays -(id, X, coordinates) containing geodataframe indices, the extracted pixel values, and the spatial coordinates of the sampled -pixels. These arrays are masked arrays with nodata values in the RasterStack datasets being masked. - -The `extract_raster` method can also be used to spatially query pixel values from a Raster object using another -raster containing labelled pixels. This raster has to be spatially aligned with the Raster object. This method also returns -the values of the labelled pixels along with the queried pixel values. 
- -``` -# Create a training dataset by extracting the raster values at the training point locations: -df_points = stack.extract_vector(training_pt) -df_polygons = stack.extract_vector(training_py) -df_lines = stack.extract_vector(training_lines) -df_raster = stack.extract_raster(training_px) - -df_points.head() - -# join the extracted pixel data back with the training data -df_points = df_points.droplevel(0).merge( - training_pt.loc[:, ("id")], - left_index=True, - right_index=True -) -df_points = df_points.dropna() -``` - -### Model Training - -Next we can train a logistic regression classifier: - -``` -# Next we can train a logistic regression classifier: -from sklearn.linear_model import LogisticRegressionCV -from sklearn.preprocessing import StandardScaler -from sklearn.pipeline import Pipeline -from sklearn.model_selection import cross_validate - -# define the classifier with standardization of the input features in a pipeline -lr = Pipeline( - [('scaling', StandardScaler()), - ('classifier', LogisticRegressionCV(n_jobs=-1))]) - -# fit the classifier -X = df_points.drop(columns=["geometry", "id"]) -y = df_points.id -lr.fit(X, y) -```` - -After defining a classifier, a typical step consists of performing a cross-validation to evaluate the performance of the -model. Scikit-learn provides the cross_validate function for this purpose. In comparison to non-spatial data, spatial -data can be spatially correlated, which potentially can mean that geographically proximal samples may not represent -independent samples if they are within the autocorrelation range of some of the predictors. This will lead to overly -optimistic performance measures if samples in the training dataset / cross-validation partition are strongly spatially -correlated with samples in the test dataset / cross-validation partition. 
- -In this case, performing cross-validation using groups is useful, because these groups can represent spatial clusters of -training samples, and samples from the same group will never occur in both the training and test partitions of a -cross-validation. An example of creating random spatial clusters from point coordinates is provided here: - -``` -# spatial cross-validation -from sklearn.cluster import KMeans - -# create 10 spatial clusters based on clustering of the training data point x,y coordinates -clusters = KMeans(n_clusters=34, n_jobs=-1) -clusters.fit(df_points.geometry.bounds.iloc[:, 0:2]) - -# cross validate -scores = cross_validate( - lr, X, y, groups=clusters.labels_, - scoring='accuracy', - cv=3, n_jobs=1) -scores['test_score'].mean() -``` - -### Raster Prediction - -Prediction on the Raster object is performed using the `predict` and `predict_proba` methods. The `estimator` is the only required -argument. If the `file_path` argument is not specified then the result is automatically written to a temporary file. -The predict method returns an rasterio.io.DatasetReader object which is open. For probability prediction, -`indexes` can also be supplied if you only want to output the probabilities for a particular class, or list of -classes, by supplying the indices of those classes: - -``` -# prediction -result = stack.predict(estimator=lr, dtype='int16', nodata=0) -result_probs = stack.predict_proba(estimator=lr) - -# plot classification result -result.plot() -plt.show() - -# plot class probabilities -result_probs.plot() -plt.show() -``` - -## Sampling Tools - -For many spatial models, it is common to take a random sample of the predictors to represent a single class (i.e. an -environmental background or pseudo-absences in a binary classification model). 
The sample function is supplied in the -sampling module for this purpose: -``` -# extract training data using a random sample -df_rand = stack.sample(size=100, random_state=1) -df_rand.plot() -``` - -The sample function also enables stratified random sampling based on passing a categorical raster dataset to the strata -argument. The categorical raster should spatially overlap with the dataset to be sampled, but it does not need to be of -the same grid resolution. This raster should be passed as another `Raster` dataset containing a single categorical layer: - -``` -strata = Raster(nc.strata) -df_strata = stack.sample(size=5, strata=strata, random_state=1) -df_strata = df_strata.dropna() - -fig, ax = plt.subplots() -ax.imshow(strata.read(1, masked=True), extent=rasterio.plot.plotting_extent(strata), cmap='tab10') -df_strata.plot(ax=ax, markersize=20, color='white') -plt.show() -``` - -## Vector Data Tools - -In some cases, we don't need all of the training data, but rather would spatially thin a point dataset. The -filter_points function performs point-thinning based on a minimum distance buffer on a geopandas dataframe containing -point geometries: - -``` -from pyspatialml.vector import filter_points - -thinned_points = filter_points(training_pt, min_dist=500, remove='first') -thinned_points.shape -``` - -We can also generate random points within polygons using the get_random_point_in_polygon function. 
This requires a -shapely POLYGON geometry as an input, and returns a shapely POINT object: - -``` -from pyspatialml.vector import get_random_point_in_polygon - -# generate 5 random points in a single polygon -random_points = [get_random_point_in_polygon(training_py.geometry[0]) for i in range(5)] - -# convert to a GeoDataFrame -random_points = geopandas.GeoDataFrame( - geometry=geopandas.GeoSeries(random_points)) -``` +[![Codecov test coverage](https://codecov.io/gh/stevenpawley/pyspatialml/branch/master/graph/badge.svg)](https://codecov.io/gh/stevenpawley/pyspatialml?branch=master) +![Pytest](https://github.com/stevenpawley/pyspatialml/actions/workflows/tests.yml/badge.svg) + +# Pyspatialml +Machine learning classification and regression modelling for spatial raster data. + +## Description +`Pyspatialml` is a Python module for applying scikit-learn machine learning models to 'stacks' of raster datasets. +Pyspatialml includes functions and classes for working with multiple raster datasets and performing a typical machine +learning workflow consisting of extracting training data and applying the predict or predict_proba methods of +scikit-learn estimators to a stack of raster datasets. Pyspatialml is built upon the `rasterio` Python module for +all of the heavy lifting, and is also designed for working with vector data using the `geopandas` module. + +For more information read the documents page at: https://stevenpawley.github.io/Pyspatialml/ + +## Rationale +A typical supervised machine-learning workflow involving raster datasets consists of several steps: + +1. Using vector features or labelled pixels to extract training data from a stack of raster-based predictors (e.g. +spectral bands, terrain derivatives, or climate grids). 
The training data represent locations where some +property/state/concentration is already established, and might comprise point locations of arsenic concentrations, or +labelled pixels with integer-encoded values that correspond to known landcover types. + +2. Developing a machine learning classification or regression model on the training data. Pyspatialml is designed to use +scikit-learn compatible api's for this purpose. + +3. Applying the fitted machine learning model to make predictions on all of the pixels in the stack of raster data. + +Pyspatialml is designed to make it easy to develop spatial prediction models on stacks of 2D raster datasets that are +held on disk. Unlike using python's `numpy` module directly where raster datasets need to be held in memory, +the majority of functions within pyspatialml work with raster datasets that are stored on disk and allow processing +operations to be performed on datasets that are too large to be loaded into memory. + +## Design + +### Raster and RasterLayer objects + +The main class that facilitates working with multiple raster datasets is the `Raster` class, which is inspired by +the famous ```raster``` package in the R statistical programming language. The `Raster` object takes a list of file +paths to GDAL-supported raster datasets and 'stacks' them into a single Raster object. The underlying file-based raster +datasets are not physically-stacked, but rather the Raster object internally represents each band within the datasets as +a `RasterLayer`. This means that metadata regarding what each raster dataset represents (e.g. the dataset's name) +can be retained, and additional raster datasets can be added or removed from the stack without physical on disk changes. + +Note these raster datasets need to be spatially aligned in terms of their extent, resolution and coordinate reference +system. + +### Usage + +There are four methods of creating a new Raster object: + +1. 
`Raster([raster1.tif, raster2.tif, raster3.tif])` creates a Raster object from existing file-based +GDAL-supported datasets. + +2. `Raster(new_numpy_array, crs=crs, transform=transform)` creates a Raster object from a 3D numpy array (band, +row, column). The `crs` and `transform` arguments are optional but are required to provide coordinate reference +system information to the Raster object. The crs argument has to be represented by `rasterio.crs.CRS` object, and +the transform parameter requires a `affine.Affine` object. + +3. `Raster([RasterLayer1, RasterLayer2, RasterLayer3])` creates a Raster object from a single or list of +RasterLayer objects. RasterLayers are a thin wrapper around rasterio.Band objects with additional methods. This is +mostly used internally. A RasterLayer itself is initiated directly from a rasterio.Band object. + +4. `Raster([src0, src1, src2])` where the list elements are `rasterio.io.datasetreader` objects, i.e. raster +datasets that have been opened using the `rasterio.open` method. + +Generally, pyspatialml intends users to work with the Raster object. However, access to individual RasterLayer +objects, or the underlying rasterio.band datasets can be useful if pyspatialml is being used in conjunction with other +functions and methods in the Rasterio package. + +## Installation + +The package is now available on PyPI, but can also be installed from GitHub directly via: + +``` +pip install git+https://github.com/stevenpawley/Pyspatialml +``` + +## Quickstart + +This is an example using the imagery data that is bundled with the package. This data is derived from the GRASS GIS +North Carolina dataset and comprises Landsat 7 VNIR and SWIR bands along with some land cover training data that were +derived from a land cover classification from an earlier date. 
+ +First, import the extract and predict functions: + +``` +from pyspatialml import Raster +from copy import deepcopy +import os +import tempfile +import geopandas +import rasterio.plot +import matplotlib.pyplot as plt +``` + +### Stacking + +We are going to use a set of Landsat 7 bands contained within the nc_dataset: + +``` +import pyspatialml.datasets.nc as nc +predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] +``` + +These raster datasets are aligned in terms of their extent and coordinate reference systems. We can 'stack' these into a +Raster class so that we can perform machine learning related operations on the set of rasters: + +``` +stack = Raster(predictors) +``` + +Upon 'stacking', syntactically-correct names for each RasterLayer are automatically generated from the file_paths. + +### Indexing + +Indexing of Raster objects is provided by several methods: + +- Raster[keys] : subsets a Raster using key-based indexing based on the names of the RasterLayers. If a single key is +supplied then a RasterLayer is returned, otherwise a Raster object is returned contained the subsetted layers. +- Raster.iloc[int, list, tuple, slice] : subsets a Raster using integer-based indexing or slicing. If a single key is +supplied then a RasterLayer is returned, otherwise a Raster object is returned contained the subsetted layers. +- Raster.name : attribute names can be used directly, and always returns a single RasterLayer object. 
+
+RasterLayer indexing which returns a RasterLayer:
+
+```
+# index by integer position
+rasterlayer = stack.iloc[0]
+rasterstack = stack.iloc[0:3]
+
+# index by name
+rasterlayer = stack['lsat7_2000_10']
+rasterstack = stack[('lsat7_2000_10', 'lsat7_2000_20')]
+
+# index by attribute
+rasterlayer = stack.lsat7_2000_10
+```
+
+Iterate through RasterLayers:
+
+```
+for name, layer in stack:
+    print(layer)
+```
+
+Subset a Raster object:
+
+```
+subset_raster = stack[['lsat7_2000_10', 'lsat7_2000_70']]
+subset_raster.names
+```
+
+Replace a RasterLayer with another:
+
+```
+stack.iloc[0] = Raster(nc.band7).iloc[0]
+```
+
+Append layers from another Raster to the stack. Note that this occurs in-place. Duplicate names are automatically given
+a suffix:
+
+```
+stack.append(Raster(nc.band7))
+stack.names
+```
+
+Rename RasterLayers using a dict of old_name : new_name pairs:
+
+```
+stack.names
+stack.rename({'lsat7_2000_30': 'new_name'}, in_place=True)
+stack.names
+stack.new_name
+stack['new_name']
+stack.loc['new_name']
+```
+
+We can also change all of the column names by replacing them:
+
+```
+stack.names = ["band1", "band2", "band3", "band4", "band5", "band7"]
+```
+
+Drop a RasterLayer:
+
+```
+stack.names
+stack.drop(labels='band1', in_place=True)
+stack.names
+```
+
+Save a Raster:
+
+```
+tmp_tif = tempfile.NamedTemporaryFile().name + '.tif'
+newstack = stack.write(file_path=tmp_tif, nodata=-9999)
+newstack.band2.read()
+```
+
+### Plotting
+
+Basic plotting has been added as a method to RasterLayer and Raster options. More controls on plotting will be added
+in the future. Currently you can set a matplotlib cmap for each RasterLayer using the RasterLayer.cmap attribute.
+ +Plot a single RasterLayer: + +``` +from matplotlib.colors import Normalize + +stack = Raster(predictors) +stack.lsat7_2000_10.cmap = 'plasma' +stack.lsat7_2000_10.norm = Normalize(20, 210) +stack.lsat7_2000_10.plot() +``` + +Plot all RasterLayers in a Raster object: + +``` +stack.plot() +``` + +### Integration with Pandas + +Data from a Raster object can be converted into a Pandas dataframe, with each pixel representing by a row, and columns +reflecting the x, y coordinates and the values of each RasterLayer in the Raster object: + +``` +df = stack.to_pandas(max_pixels=50000, resampling='nearest') +df.head() +``` + +The original raster is up-sampled based on max_pixels and the resampling method, which uses all of resampling methods +available in the underlying rasterio library for decimated reads. The Raster.to_pandas method can be useful for plotting +datasets, or combining with a library such as plotnine to create ggplot2-style plots of stacks of RasterLayers: + +``` +from plotnine import * +(ggplot(df.melt(id_vars=['x', 'y']), aes(x='x', y='y', fill='value')) + +geom_tile() + facet_wrap('variable')) +``` + +## A Machine Learning Workflow + +### Extract Training Data + +Load some training data in the form of polygons, points and labelled pixels in geopandas GeoDataFrame objects. We will +also generate some line geometries by converting the polygon boundaries into linestrings. All of these geometry types +can be used to spatially query pixel values in a Raster object, however each GeoDataFrame must contain only one type of +geometry (i.e. either shapely points, polygons or linestrings). 
+
+```
+training_py = geopandas.read_file(nc.polygons)
+training_pt = geopandas.read_file(nc.points)
+training_px = rasterio.open(nc.labelled_pixels)
+training_lines = deepcopy(training_py)
+training_lines['geometry'] = training_lines.geometry.boundary
+```
+
+Show training data points and a single raster band using numpy and matplotlib:
+
+```
+stack = Raster(predictors)
+plt.imshow(stack.lsat7_2000_70.read(masked=True),
+           extent=rasterio.plot.plotting_extent(stack.lsat7_2000_70))
+plt.scatter(x=training_pt.bounds.iloc[:, 0],
+            y=training_pt.bounds.iloc[:, 1],
+            s=2, color='black')
+plt.show()
+```
+
+Pixel values in the Raster object can be spatially queried using the `extract_vector` and `extract_raster`
+methods. In addition, the `extract_xy` method can be used to query pixel values using a 2d array of x and y
+coordinates.
+
+The `extract_vector` method accepts a Geopandas GeoDataFrame as the `gdf` argument. For
+GeoDataFrames containing shapely point geometries, the closest pixel to each point is sampled. For shapely polygon
+geometries, all pixels whose centres are inside the polygon are sampled. For shapely linestring geometries, every pixel
+touched by the line is sampled. For all geometry types, pixel values are queried for each geometry separately. This
+means that overlapping polygons or points that fall within the same pixel will cause the same pixel to be sampled
+multiple times.
+
+By default, the extract functions return a Geopandas GeoDataFrame of point geometries and the DataFrame containing the
+extracted pixels, with the column names set by the names of the raster datasets in the Raster object. The user can also
+use the `return_array=True` argument, which instead of returning a DataFrame will return three masked numpy arrays
+(id, X, coordinates) containing geodataframe indices, the extracted pixel values, and the spatial coordinates of the sampled
+pixels.
These arrays are masked arrays with nodata values in the RasterStack datasets being masked.
+
+The `extract_raster` method can also be used to spatially query pixel values from a Raster object using another
+raster containing labelled pixels. This raster has to be spatially aligned with the Raster object. This method also returns
+the values of the labelled pixels along with the queried pixel values.
+
+```
+# Create a training dataset by extracting the raster values at the training point locations:
+df_points = stack.extract_vector(training_pt)
+df_polygons = stack.extract_vector(training_py)
+df_lines = stack.extract_vector(training_lines)
+df_raster = stack.extract_raster(training_px)
+
+df_points.head()
+
+# join the extracted pixel data back with the training data
+df_points = df_points.droplevel(0).merge(
+    training_pt.loc[:, ("id")],
+    left_index=True,
+    right_index=True
+)
+df_points = df_points.dropna()
+```
+
+### Model Training
+
+Next we can train a logistic regression classifier:
+
+```
+# Next we can train a logistic regression classifier:
+from sklearn.linear_model import LogisticRegressionCV
+from sklearn.preprocessing import StandardScaler
+from sklearn.pipeline import Pipeline
+from sklearn.model_selection import cross_validate
+
+# define the classifier with standardization of the input features in a pipeline
+lr = Pipeline(
+    [('scaling', StandardScaler()),
+     ('classifier', LogisticRegressionCV(n_jobs=-1))])
+
+# fit the classifier
+X = df_points.drop(columns=["geometry", "id"])
+y = df_points.id
+lr.fit(X, y)
+```
+
+After defining a classifier, a typical step consists of performing a cross-validation to evaluate the performance of the
+model. Scikit-learn provides the cross_validate function for this purpose.
In comparison to non-spatial data, spatial
+data can be spatially correlated, which potentially can mean that geographically proximal samples may not represent
+independent samples if they are within the autocorrelation range of some of the predictors. This will lead to overly
+optimistic performance measures if samples in the training dataset / cross-validation partition are strongly spatially
+correlated with samples in the test dataset / cross-validation partition.
+
+In this case, performing cross-validation using groups is useful, because these groups can represent spatial clusters of
+training samples, and samples from the same group will never occur in both the training and test partitions of a
+cross-validation. An example of creating random spatial clusters from point coordinates is provided here:
+
+```
+# spatial cross-validation
+from sklearn.cluster import KMeans
+
+# create 34 spatial clusters based on clustering of the training data point x,y coordinates
+clusters = KMeans(n_clusters=34, n_jobs=-1)
+clusters.fit(df_points.geometry.bounds.iloc[:, 0:2])
+
+# cross validate
+scores = cross_validate(
+    lr, X, y, groups=clusters.labels_,
+    scoring='accuracy',
+    cv=3, n_jobs=1)
+scores['test_score'].mean()
+```
+
+### Raster Prediction
+
+Prediction on the Raster object is performed using the `predict` and `predict_proba` methods. The `estimator` is the only required
+argument. If the `file_path` argument is not specified then the result is automatically written to a temporary file.
+The predict method returns a rasterio.io.DatasetReader object which is open.
For probability prediction, +`indexes` can also be supplied if you only want to output the probabilities for a particular class, or list of +classes, by supplying the indices of those classes: + +``` +# prediction +result = stack.predict(estimator=lr, dtype='int16', nodata=0) +result_probs = stack.predict_proba(estimator=lr) + +# plot classification result +result.plot() +plt.show() + +# plot class probabilities +result_probs.plot() +plt.show() +``` + +## Sampling Tools + +For many spatial models, it is common to take a random sample of the predictors to represent a single class (i.e. an +environmental background or pseudo-absences in a binary classification model). The sample function is supplied in the +sampling module for this purpose: +``` +# extract training data using a random sample +df_rand = stack.sample(size=100, random_state=1) +df_rand.plot() +``` + +The sample function also enables stratified random sampling based on passing a categorical raster dataset to the strata +argument. The categorical raster should spatially overlap with the dataset to be sampled, but it does not need to be of +the same grid resolution. This raster should be passed as another `Raster` dataset containing a single categorical layer: + +``` +strata = Raster(nc.strata) +df_strata = stack.sample(size=5, strata=strata, random_state=1) +df_strata = df_strata.dropna() + +fig, ax = plt.subplots() +ax.imshow(strata.read(1, masked=True), extent=rasterio.plot.plotting_extent(strata), cmap='tab10') +df_strata.plot(ax=ax, markersize=20, color='white') +plt.show() +``` + +## Vector Data Tools + +In some cases, we don't need all of the training data, but rather would spatially thin a point dataset. 
The +filter_points function performs point-thinning based on a minimum distance buffer on a geopandas dataframe containing +point geometries: + +``` +from pyspatialml.vector import filter_points + +thinned_points = filter_points(training_pt, min_dist=500, remove='first') +thinned_points.shape +``` + +We can also generate random points within polygons using the get_random_point_in_polygon function. This requires a +shapely POLYGON geometry as an input, and returns a shapely POINT object: + +``` +from pyspatialml.vector import get_random_point_in_polygon + +# generate 5 random points in a single polygon +random_points = [get_random_point_in_polygon(training_py.geometry[0]) for i in range(5)] + +# convert to a GeoDataFrame +random_points = geopandas.GeoDataFrame( + geometry=geopandas.GeoSeries(random_points)) +``` diff --git a/_freeze/docs/geoprocessing/execute-results/html.json b/_freeze/docs/geoprocessing/execute-results/html.json index 6691e06..d415e02 100644 --- a/_freeze/docs/geoprocessing/execute-results/html.json +++ b/_freeze/docs/geoprocessing/execute-results/html.json @@ -1,12 +1,12 @@ -{ - "hash": "5f253fe0d4b07a283614d077bb8a5e51", - "result": { - "engine": "jupyter", - "markdown": "---\ntitle: Raster Geoprocessing\nformat:\n html:\n code-fold: false\n toc: true\n---\n\nPyspatialml includes common geoprocessing methods that collectively operate on\nstacks of raster datasets, such as cropping, reprojecting, masking etc. Most\nof these methods are simple wrappers around underlying rasterio functions, but\napplied to stacks of raster datasets.\n\n## Handling of Temporary Files\n\nAll of the geoprocessing methods have a `file_path` parameter to specify a file\npath to save the results of th geoprocessing operation. 
However, pyspatialml is\ndesigned for quick an interactive analyses on raster datasets, and if a file\npath is not specified then the results are saved to a temporary file location\nand a new Raster object is returned with the geoprocessing results.\n\nFor datasets that will easily fit into memory, all geoprocessing methods also\nhave an `in_memory` parameter. If `in_memory=True` is set, then the results\nwill be created using Rasterio's in-memory files and stored in RAM. This has\nperformance advantages, at the expense of memory expenditure.\n\n## Cropping\n\nAll layers within a Raster can be cropped to a new extent using the\n`Raster.crop` method.\n\n::: {#70f3733f .cell execution_count=1}\n``` {.python .cell-code}\nimport geopandas as gpd\nimport matplotlib.pyplot as plt\nfrom pyspatialml import Raster\nfrom pyspatialml.datasets import nc\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\ntraining_py = gpd.read_file(nc.polygons)\n\nstack = Raster(predictors)\n\nstack.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](geoprocessing_files/figure-html/cell-2-output-1.png){width=604 height=372}\n:::\n:::\n\n\n::: {#d43b4f2d .cell execution_count=2}\n``` {.python .cell-code}\n# crop to new extent (xmin, ymin, xmax, ymax)\ncrop_bounds = training_py.loc[0, \"geometry\"].bounds\nstack_cropped = stack.crop(crop_bounds)\n\nstack_cropped.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](geoprocessing_files/figure-html/cell-3-output-1.png){width=604 height=370}\n:::\n:::\n\n\n## Masking\n\nIn comparison to cropping, masking can be used to set pixels that occur outside\nof masking geometries to NaN, and optionally can also crop a Raster.\n\n::: {#33fabaa0 .cell execution_count=3}\n``` {.python .cell-code}\nimport geopandas as gpd\nimport pyspatialml.datasets.nc as nc\nfrom pyspatialml import Raster\n\ntraining_py = gpd.read_file(nc.polygons)\nmask_py = training_py.iloc[0:1, :]\n\npredictors = [nc.band1, 
nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\nstack = Raster(predictors)\n\n# mask a Raster\nmasked_object = stack.mask(mask_py)\n```\n:::\n\n\n## Intersecting Layers\n\nThe `Raster.intersect` method computes the geometric intersection of the\nRasterLayers with the Raster object. This will cause nodata values in any of\nthe rasters to be propagated through all of the output rasters.\n\n::: {#d7a7ebe7 .cell execution_count=4}\n``` {.python .cell-code}\nimport pyspatialml.datasets.nc as nc\nfrom pyspatialml import Raster\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\nstack = Raster(predictors)\n\nresult = stack.intersect()\n```\n:::\n\n\nThe intersect method is memory-safe, i.e. the intersection occurs during\nwindowed reading and writing of the Raster. The size and dimensions of the\nwindows can be changed using the `Raster.block_shapes` property.\n\n## Reprojecting\n\nReprojecting a raster using the `Raster.to_crs` method.\n\n::: {#8f71eb11 .cell execution_count=5}\n``` {.python .cell-code}\nstack_prj = stack.to_crs(crs={\"init\": \"EPSG:4326\"})\n```\n:::\n\n\nOther parameters that can be passed and their defaults are\nresampling=\"nearest\", file_path=None, driver=\"GTiff\", nodata=None, n_jobs=1,\nwarp_mem_lim=0, progress=False, and other kwargs that are passed to the raster\nformat drivers.\n\n## Resampling\n\nThe `Raster.aggregate` method is used to resample a raster to a different\nresolution using the decimated reading approach in the rasterio library.\n\n::: {#1452d771 .cell execution_count=6}\n``` {.python .cell-code}\nstack.aggregate(out_shape=(50, 50), resampling=\"nearest\", driver=\"GTiff\")\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nRaster Object Containing 6 Layers\n attribute values\n0 names [lsat7_2000_10, lsat7_2000_20, lsat7_2000_30, ...\n1 files [/var/folders/_m/kbp8r1612yj1xl6ndb2y8vpm0000g...\n2 rows 50\n3 cols 50\n4 res (278.73, 252.51)\n5 nodatavals [-3.4028234663852886e+38, 
-3.4028234663852886e...\n```\n:::\n\n::: {.cell-output .cell-output-display execution_count=6}\n```\n\n```\n:::\n:::\n\n\n## Computation\n\nApply user-supplied function to a Raster object.\n\n::: {#ac19dac5 .cell execution_count=7}\n``` {.python .cell-code}\ndef myfun(x):\n return x + 1\n\nstack.apply(myfun)\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nRaster Object Containing 6 Layers\n attribute values\n0 names [tmp0n2yd7no_1, tmp0n2yd7no_2, tmp0n2yd7no_3, ...\n1 files [/var/folders/_m/kbp8r1612yj1xl6ndb2y8vpm0000g...\n2 rows 443\n3 cols 489\n4 res (28.5, 28.5)\n5 nodatavals [-3.4028234663852886e+38, -3.4028234663852886e...\n```\n:::\n\n::: {.cell-output .cell-output-display execution_count=7}\n```\n\n```\n:::\n:::\n\n\nWhere `function` is a user-defined python that takes an numpy array as a\nsingle argument, and can return either a 2d array that represents a single\nraster dataset, such as NDVI, or can operate on a number of layers and can\nreturn a raster with multiple layers in a 3d array in (layer, row, col)\norder.\n\nThe apply function is memory-safe, i.e. it applies the function to windows\nof the raster data that are read sequentially or in parallel\n(with n_jobs != 1). The size and dimensions of the windows can be changed\nusing the `Raster.block_shapes` property.\n\n## Raster Algebra\n\nRasterLayer objects also support basic raster math operations using python's\nmagic methods, which supports all of the usual math operators. Calculations\non RasterLayers occur in memory using Rasterio's in-memory files, thus they\nare not memory safe. For applying computations and algebra to large raster\ndatasets in windows, use `Raster.apply()`.\n\n::: {#a90b8434 .cell execution_count=8}\n``` {.python .cell-code}\na = stack.iloc[0] + stack.iloc[1]\nb = stack.iloc[0] - stack.iloc[1]\n\nndvi = (stack.iloc[3] - stack.iloc[2]) / (stack.iloc[3] + stack.iloc[2])\n```\n:::\n\n\nArithmetic operations on RasterLayer's will return another RasterLayer. 
The\nresult can be coerced into a Raster object using:\n\n::: {#0ce834ce .cell execution_count=9}\n``` {.python .cell-code}\nndvi = Raster((stack.iloc[3] - stack.iloc[2]) / (stack.iloc[3] + stack.iloc[2]))\n```\n:::\n\n\nArithmetic operations are only supported on RasterLayer objects and\nnot in a parent Raster object directly.\n\n", - "supporting": [ - "geoprocessing_files" - ], - "filters": [], - "includes": {} - } +{ + "hash": "5f253fe0d4b07a283614d077bb8a5e51", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: Raster Geoprocessing\nformat:\n html:\n code-fold: false\n toc: true\n---\n\nPyspatialml includes common geoprocessing methods that collectively operate on\nstacks of raster datasets, such as cropping, reprojecting, masking etc. Most\nof these methods are simple wrappers around underlying rasterio functions, but\napplied to stacks of raster datasets.\n\n## Handling of Temporary Files\n\nAll of the geoprocessing methods have a `file_path` parameter to specify a file\npath to save the results of th geoprocessing operation. However, pyspatialml is\ndesigned for quick an interactive analyses on raster datasets, and if a file\npath is not specified then the results are saved to a temporary file location\nand a new Raster object is returned with the geoprocessing results.\n\nFor datasets that will easily fit into memory, all geoprocessing methods also\nhave an `in_memory` parameter. If `in_memory=True` is set, then the results\nwill be created using Rasterio's in-memory files and stored in RAM. 
This has\nperformance advantages, at the expense of memory expenditure.\n\n## Cropping\n\nAll layers within a Raster can be cropped to a new extent using the\n`Raster.crop` method.\n\n::: {#70f3733f .cell execution_count=1}\n``` {.python .cell-code}\nimport geopandas as gpd\nimport matplotlib.pyplot as plt\nfrom pyspatialml import Raster\nfrom pyspatialml.datasets import nc\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\ntraining_py = gpd.read_file(nc.polygons)\n\nstack = Raster(predictors)\n\nstack.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](geoprocessing_files/figure-html/cell-2-output-1.png){width=604 height=372}\n:::\n:::\n\n\n::: {#d43b4f2d .cell execution_count=2}\n``` {.python .cell-code}\n# crop to new extent (xmin, ymin, xmax, ymax)\ncrop_bounds = training_py.loc[0, \"geometry\"].bounds\nstack_cropped = stack.crop(crop_bounds)\n\nstack_cropped.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](geoprocessing_files/figure-html/cell-3-output-1.png){width=604 height=370}\n:::\n:::\n\n\n## Masking\n\nIn comparison to cropping, masking can be used to set pixels that occur outside\nof masking geometries to NaN, and optionally can also crop a Raster.\n\n::: {#33fabaa0 .cell execution_count=3}\n``` {.python .cell-code}\nimport geopandas as gpd\nimport pyspatialml.datasets.nc as nc\nfrom pyspatialml import Raster\n\ntraining_py = gpd.read_file(nc.polygons)\nmask_py = training_py.iloc[0:1, :]\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\nstack = Raster(predictors)\n\n# mask a Raster\nmasked_object = stack.mask(mask_py)\n```\n:::\n\n\n## Intersecting Layers\n\nThe `Raster.intersect` method computes the geometric intersection of the\nRasterLayers with the Raster object. 
This will cause nodata values in any of\nthe rasters to be propagated through all of the output rasters.\n\n::: {#d7a7ebe7 .cell execution_count=4}\n``` {.python .cell-code}\nimport pyspatialml.datasets.nc as nc\nfrom pyspatialml import Raster\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\nstack = Raster(predictors)\n\nresult = stack.intersect()\n```\n:::\n\n\nThe intersect method is memory-safe, i.e. the intersection occurs during\nwindowed reading and writing of the Raster. The size and dimensions of the\nwindows can be changed using the `Raster.block_shapes` property.\n\n## Reprojecting\n\nReprojecting a raster using the `Raster.to_crs` method.\n\n::: {#8f71eb11 .cell execution_count=5}\n``` {.python .cell-code}\nstack_prj = stack.to_crs(crs={\"init\": \"EPSG:4326\"})\n```\n:::\n\n\nOther parameters that can be passed and their defaults are\nresampling=\"nearest\", file_path=None, driver=\"GTiff\", nodata=None, n_jobs=1,\nwarp_mem_lim=0, progress=False, and other kwargs that are passed to the raster\nformat drivers.\n\n## Resampling\n\nThe `Raster.aggregate` method is used to resample a raster to a different\nresolution using the decimated reading approach in the rasterio library.\n\n::: {#1452d771 .cell execution_count=6}\n``` {.python .cell-code}\nstack.aggregate(out_shape=(50, 50), resampling=\"nearest\", driver=\"GTiff\")\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nRaster Object Containing 6 Layers\n attribute values\n0 names [lsat7_2000_10, lsat7_2000_20, lsat7_2000_30, ...\n1 files [/var/folders/_m/kbp8r1612yj1xl6ndb2y8vpm0000g...\n2 rows 50\n3 cols 50\n4 res (278.73, 252.51)\n5 nodatavals [-3.4028234663852886e+38, -3.4028234663852886e...\n```\n:::\n\n::: {.cell-output .cell-output-display execution_count=6}\n```\n\n```\n:::\n:::\n\n\n## Computation\n\nApply user-supplied function to a Raster object.\n\n::: {#ac19dac5 .cell execution_count=7}\n``` {.python .cell-code}\ndef myfun(x):\n return x + 
1\n\nstack.apply(myfun)\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nRaster Object Containing 6 Layers\n attribute values\n0 names [tmp0n2yd7no_1, tmp0n2yd7no_2, tmp0n2yd7no_3, ...\n1 files [/var/folders/_m/kbp8r1612yj1xl6ndb2y8vpm0000g...\n2 rows 443\n3 cols 489\n4 res (28.5, 28.5)\n5 nodatavals [-3.4028234663852886e+38, -3.4028234663852886e...\n```\n:::\n\n::: {.cell-output .cell-output-display execution_count=7}\n```\n\n```\n:::\n:::\n\n\nWhere `function` is a user-defined python that takes an numpy array as a\nsingle argument, and can return either a 2d array that represents a single\nraster dataset, such as NDVI, or can operate on a number of layers and can\nreturn a raster with multiple layers in a 3d array in (layer, row, col)\norder.\n\nThe apply function is memory-safe, i.e. it applies the function to windows\nof the raster data that are read sequentially or in parallel\n(with n_jobs != 1). The size and dimensions of the windows can be changed\nusing the `Raster.block_shapes` property.\n\n## Raster Algebra\n\nRasterLayer objects also support basic raster math operations using python's\nmagic methods, which supports all of the usual math operators. Calculations\non RasterLayers occur in memory using Rasterio's in-memory files, thus they\nare not memory safe. For applying computations and algebra to large raster\ndatasets in windows, use `Raster.apply()`.\n\n::: {#a90b8434 .cell execution_count=8}\n``` {.python .cell-code}\na = stack.iloc[0] + stack.iloc[1]\nb = stack.iloc[0] - stack.iloc[1]\n\nndvi = (stack.iloc[3] - stack.iloc[2]) / (stack.iloc[3] + stack.iloc[2])\n```\n:::\n\n\nArithmetic operations on RasterLayer's will return another RasterLayer. 
The\nresult can be coerced into a Raster object using:\n\n::: {#0ce834ce .cell execution_count=9}\n``` {.python .cell-code}\nndvi = Raster((stack.iloc[3] - stack.iloc[2]) / (stack.iloc[3] + stack.iloc[2]))\n```\n:::\n\n\nArithmetic operations are only supported on RasterLayer objects and\nnot in a parent Raster object directly.\n\n", + "supporting": [ + "geoprocessing_files" + ], + "filters": [], + "includes": {} + } } \ No newline at end of file diff --git a/_freeze/docs/guide/execute-results/html.json b/_freeze/docs/guide/execute-results/html.json new file mode 100644 index 0000000..a251617 --- /dev/null +++ b/_freeze/docs/guide/execute-results/html.json @@ -0,0 +1,16 @@ +{ + "hash": "1ddec99dd2cca1a0e802081725fd4c4e", + "result": { + "engine": "jupyter", + "markdown": "m q---\ntitle: \"Quick start\"\nformat:\n html:\n code-fold: false\n toc: true\njupyter: python3\n\n---\n\n## Initiating a Raster Object\n\nWe are going to use a set of Landsat 7 bands contained within the nc example\ndata:\n\n::: {#d5219e67 .cell execution_count=1}\n``` {.python .cell-code}\nfrom pyspatialml import Raster\nimport pyspatialml.datasets.nc as nc\nimport matplotlib.pyplot as plt\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\n```\n:::\n\n\nThese raster datasets are aligned in terms of their extent and coordinate\nreference systems. 
We can 'stack' these into a Raster class so that we can\nperform machine learning related operations on the set of rasters:\n\n::: {#9103e529 .cell execution_count=2}\n``` {.python .cell-code}\nstack = Raster(predictors)\n```\n:::\n\n\nWhen a Raster object is created, the names to each layer are automatically\ncreated based on syntactically-correct versions of the file basenames:\n\n::: {#a9965773 .cell execution_count=3}\n``` {.python .cell-code}\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=3}\n```\ndict_keys(['lsat7_2000_10', 'lsat7_2000_20', 'lsat7_2000_30', 'lsat7_2000_40', 'lsat7_2000_50', 'lsat7_2000_70'])\n```\n:::\n:::\n\n\nColor ramps and matplotlib.colors.Normalize objects can be assigned to each\nRasterLayer in the object using the `cmap` and `norm` attributes for\nconvenient in plotting:\n\n::: {#cdb75bd4 .cell execution_count=4}\n``` {.python .cell-code}\nstack.lsat7_2000_10.cmap = \"Blues\"\nstack.lsat7_2000_20.cmap = \"Greens\"\nstack.lsat7_2000_30.cmap = \"Reds\"\nstack.lsat7_2000_40.cmap = \"RdPu\"\nstack.lsat7_2000_50.cmap = \"autumn\"\nstack.lsat7_2000_70.cmap = \"hot\"\n\nstack.plot(\n title_fontsize=8,\n label_fontsize=6,\n legend_fontsize=6,\n names=[\"B1\", \"B2\", \"B3\", \"B4\", \"B5\", \"B7\"],\n fig_kwds={\"figsize\": (8, 4)},\n subplots_kwds={\"wspace\": 0.3}\n)\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](guide_files/figure-html/cell-5-output-1.png){width=679 height=342}\n:::\n:::\n\n\n## Subsetting and Indexing\n\nIndexing of Raster objects is provided by several methods:\n\nThe ``Raster[keys]`` method enables key-based indexing using a name of a\nRasterLayer, or a list of names. 
Direct subsetting of a Raster object instance\nreturns a RasterLayer if only a single label is used, otherwise it always\nreturns a new Raster object containing only the selected layers.\n\nThe ``Raster.iloc[int, list, tuple, slice]`` method allows a Raster object\ninstance to be subset using integer-based indexing or slicing. The ``iloc``\nmethod returns a RasterLayer object if only a single index is used, otherwise\nit always returns a new Raster object containing only the selected layers.\n\nSubsetting of a Raster object instance can also occur by using attribute names\nin the form of ``Raster.name_of_layer``. Because only a single RasterLayer can\nbe subset at one time using this approach, a RasterLayer object is always\nreturned.\n\nExamples of methods to subset a Raster object:\n\n::: {#00bbc21a .cell execution_count=5}\n``` {.python .cell-code}\n# subset based on position\nsingle_layer = stack.iloc[0]\n\n# subset using a slice\nnew_raster_obj = stack.iloc[0:3]\n\n# subset using labels\nsingle_layer = stack['lsat7_2000_10']\nsingle_layer = stack.lsat7_2000_10\n\n# list or tuple of keys\nnew_raster_obj = stack[('lsat7_2000_10', 'lsat7_2000_20')]\n```\n:::\n\n\nIterate through RasterLayers individually:\n\n::: {#0c05154f .cell execution_count=6}\n``` {.python .cell-code}\nfor name, layer in stack.items():\n print(name, layer)\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nlsat7_2000_10 \nlsat7_2000_20 \nlsat7_2000_30 \nlsat7_2000_40 \nlsat7_2000_50 \nlsat7_2000_70 \n```\n:::\n:::\n\n\nReplace a RasterLayer with another:\n\n::: {#8ffa02a1 .cell execution_count=7}\n``` {.python .cell-code}\nstack.iloc[0] = Raster(nc.band7).iloc[0]\n\nstack.iloc[0].plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](guide_files/figure-html/cell-8-output-1.png){width=499 height=413}\n:::\n:::\n\n\n## Appending and Dropping Layers\n\nAppend layers from another Raster to the stack. 
Duplicate names are\nautomatically given a suffix.\n\n::: {#3c34de34 .cell execution_count=8}\n``` {.python .cell-code}\nstack.append(Raster(nc.band7), in_place=True)\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=8}\n```\ndict_keys(['lsat7_2000_10', 'lsat7_2000_20', 'lsat7_2000_30', 'lsat7_2000_40', 'lsat7_2000_50', 'lsat7_2000_70_1', 'lsat7_2000_70_2'])\n```\n:::\n:::\n\n\nRename RasterLayers using a dict of old_name : new_name pairs:\n\n::: {#4396ad2c .cell execution_count=9}\n``` {.python .cell-code}\nstack.names\nstack.rename({'lsat7_2000_30': 'new_name'}, in_place=True)\nstack.names\nstack.new_name\nstack['new_name']\n```\n\n::: {.cell-output .cell-output-display execution_count=9}\n```\n\n```\n:::\n:::\n\n\nDrop a RasterLayer:\n\n::: {#42702d40 .cell execution_count=10}\n``` {.python .cell-code}\nstack.names\nstack.drop(labels='lsat7_2000_70_1', in_place=True)\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=10}\n```\ndict_keys(['lsat7_2000_10', 'lsat7_2000_20', 'new_name', 'lsat7_2000_40', 'lsat7_2000_50', 'lsat7_2000_70_2'])\n```\n:::\n:::\n\n\n## Integration with Pandas\n\nData from a Raster object can converted into a `Pandas.DataDrame`, with each\npixel representing by a row, and columns reflecting the x, y coordinates and\nthe values of each RasterLayer in the Raster object:\n\n::: {#d65fd523 .cell execution_count=11}\n``` {.python .cell-code}\nimport pandas as pd\n\ndf = stack.to_pandas(max_pixels=50000, resampling='nearest')\ndf.head()\n```\n\n::: {.cell-output .cell-output-display execution_count=11}\n```{=html}\n
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
xylsat7_2000_10lsat7_2000_20new_namelsat7_2000_40lsat7_2000_50lsat7_2000_70_2
0630534.000000228114.0NaNNaNNaNNaNNaNNaN
1630562.558402228114.0NaNNaNNaNNaNNaNNaN
2630591.116803228114.0NaNNaNNaNNaNNaNNaN
3630619.675205228114.0NaNNaNNaNNaNNaNNaN
4630648.233607228114.0NaNNaNNaNNaNNaNNaN
\n
\n```\n:::\n:::\n\n\nThe original raster is up-sampled based on max_pixels and the resampling\nmethod, which uses all of resampling methods available in the underlying\nrasterio library for decimated reads.\n\n## Saving a Raster to File\n\nSave a Raster:\n\n::: {#beb7aef0 .cell execution_count=12}\n``` {.python .cell-code}\nimport tempfile\n\ntmp_tif = tempfile.NamedTemporaryFile().name + '.tif'\nnewstack = stack.write(file_path=tmp_tif, nodata=-9999)\nnewstack.new_name.read()\nnewstack = None\n```\n:::\n\n\n", + "supporting": [ + "guide_files/figure-html" + ], + "filters": [], + "includes": { + "include-in-header": [ + "\n\n\n" + ] + } + } +} \ No newline at end of file diff --git a/_freeze/docs/guide/figure-html/cell-5-output-1.png b/_freeze/docs/guide/figure-html/cell-5-output-1.png new file mode 100644 index 0000000..afdf15a Binary files /dev/null and b/_freeze/docs/guide/figure-html/cell-5-output-1.png differ diff --git a/_freeze/docs/guide/figure-html/cell-8-output-1.png b/_freeze/docs/guide/figure-html/cell-8-output-1.png new file mode 100644 index 0000000..1fe4052 Binary files /dev/null and b/_freeze/docs/guide/figure-html/cell-8-output-1.png differ diff --git a/_freeze/docs/installation/execute-results/html.json b/_freeze/docs/installation/execute-results/html.json index b269b7e..72ee659 100644 --- a/_freeze/docs/installation/execute-results/html.json +++ b/_freeze/docs/installation/execute-results/html.json @@ -1,12 +1,12 @@ -{ - "hash": "65228f7d1a193c5cace2c9a9607b0799", - "result": { - "engine": "jupyter", - "markdown": "---\ntitle: Installation\nformat:\n html:\n code-fold: false\n---\n\nPyspatialml is available on PyPI and can be installed in the usual manner with:\n\n::: {#bb02f017 .cell execution_count=1}\n``` {.python .cell-code}\npip install Pyspatialml\n```\n:::\n\n\nThe development version, which is more up-to-date with changes to the package\nespecially during these earlier stages of development, can be installed\ndirectly via:\n\n::: 
{#66ac18c4 .cell execution_count=2}\n``` {.python .cell-code}\npip install git+https://github.com/stevenpawley/Pyspatialml\n```\n:::\n\n\n", - "supporting": [ - "installation_files" - ], - "filters": [], - "includes": {} - } +{ + "hash": "65228f7d1a193c5cace2c9a9607b0799", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: Installation\nformat:\n html:\n code-fold: false\n---\n\nPyspatialml is available on PyPI and can be installed in the usual manner with:\n\n::: {#bb02f017 .cell execution_count=1}\n``` {.python .cell-code}\npip install Pyspatialml\n```\n:::\n\n\nThe development version, which is more up-to-date with changes to the package\nespecially during these earlier stages of development, can be installed\ndirectly via:\n\n::: {#66ac18c4 .cell execution_count=2}\n``` {.python .cell-code}\npip install git+https://github.com/stevenpawley/Pyspatialml\n```\n:::\n\n\n", + "supporting": [ + "installation_files" + ], + "filters": [], + "includes": {} + } } \ No newline at end of file diff --git a/_freeze/docs/landcover/execute-results/html.json b/_freeze/docs/landcover/execute-results/html.json index 2840795..ead1c8a 100644 --- a/_freeze/docs/landcover/execute-results/html.json +++ b/_freeze/docs/landcover/execute-results/html.json @@ -1,16 +1,16 @@ -{ - "hash": "c266ff597585baa54084adb9bc9f1dd5", - "result": { - "engine": "jupyter", - "markdown": "---\ntitle: Landcover classification\nformat:\n html:\n code-fold: false\n---\n\nLandcover classification is a common task in remote sensing. This example\ndemonstrates how to extract training data from a raster and vector data, train\na classifier, and predict landcover classes on a raster.\n\n## Data\n\nThe data used in this example is from the Landsat 7 ETM+ sensor, and represents\nan extract of data derived from the GRASS GIS North Carolina example dataset. \nThe data consists of 6 bands (1, 2, 3, 4, 5, 7) and labelled pixels. The labelled \npixels are used as training data for the classifier. 
The data is stored in the\n`pyspatialml.datasets` module.\n\n## Extraction Training Data\n\nLoad some training data in the form of polygons, points and labelled pixels in\n``geopandas.GeoDataFrame`` objects. We will also generate some line geometries\nby converting the polygon boundaries into linestrings. All of these geometry\ntypes can be used to spatially query pixel values in a Raster object, however\neach GeoDataFrame must contain only one type of geometry (i.e. either shapely\npoints, polygons or linestrings).\n\n::: {#5384a314 .cell execution_count=1}\n``` {.python .cell-code}\nfrom pyspatialml import Raster\nfrom pyspatialml.datasets import nc\nfrom copy import deepcopy\nimport os\nimport numpy as np\nimport tempfile\nimport geopandas\nimport rasterio.plot\nimport matplotlib.pyplot as plt\n\ntraining_py = geopandas.read_file(nc.polygons)\ntraining_pt = geopandas.read_file(nc.points)\ntraining_px = rasterio.open(nc.labelled_pixels)\ntraining_lines = deepcopy(training_py)\ntraining_lines['geometry'] = training_lines.geometry.boundary\n```\n:::\n\n\nShow training data points and a single raster band using numpy and matplotlib:\n\n::: {#031c229e .cell execution_count=2}\n``` {.python .cell-code}\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\nstack = Raster(predictors)\n\nfig, ax = plt.subplots(figsize=(9, 9))\nstack.lsat7_2000_70.plot(ax=ax)\n\ntraining_py.plot(column=\"label\", ax=ax, legend=True)\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](landcover_files/figure-html/cell-3-output-1.png){width=752 height=694}\n:::\n:::\n\n\nPixel values in the Raster object can be spatially queried using the\n`extract_vector` and `extract_raster` methods. In addition, the\n`extract_xy` method can be used to query pixel values using a 2d array of x\nand y coordinates.\n\nThe `extract_vector` method accepts a Geopandas GeoDataFrame as the\n`gdf` argument. 
For GeoDataFrames containing shapely point geometries, the\nclosest pixel to each point is sampled. For shapely polygon geometries, all\npixels whose centres are inside the polygon are sampled. For shapely\nlinestring geometries, every pixel touched by the line is sampled. For all\ngeometry types, pixel values are queries for each geometry separately. This\nmeans that overlapping polygons or points that fall within the same pixel with\ncause the same pixel to be sampled multiple times.\n\nBy default, the extract functions return a Geopandas GeoDataFrame of point\ngeometries and the DataFrame containing the extracted pixels, with the column\nnames set by the names of the raster datasets in the Raster object. The user\ncan also use the `return_array=True` argument, which instead of returning a\nDataFrame will return three masked numpy arrays (ids, X, xy) containing the\ngeodataframe index positions, extracted pixel values, and the spatial\ncoordinates of the sampled pixels. These arrays are masked arrays.\n\nThe `extract_raster` method can also be used to spatially query pixel values\nfrom a Raster object using another raster containing labelled pixels. This\nraster has to be spatially aligned with the Raster object. The values of the\nlabelled pixels are returned along with the queried pixel values.\n\n::: {#82546144 .cell execution_count=3}\n``` {.python .cell-code}\n# Extract data from rasters at the training point locations:\ndf_points = stack.extract_vector(training_pt)\ndf_polygons = stack.extract_vector(training_py)\ndf_lines = stack.extract_vector(training_lines)\n```\n:::\n\n\nFor any vector features, a GeoDataFrame is returned containing the extracted\npixel values. A pandas.MultiIndex is used to relate the pixels back to the\noriginal geometries, with the `pixel_idx` index referring to the index of each\npixel, and the `geometry_idx` referring to the index of the original geometry\nin the supplied GeoDataFrame. 
The pixel values themselves are represented as\n`shapely.geometry.Point` objects. These will need to be joined back with the\ncolumns of the vector features to get the labelled classes. Here we will join\nthe extracted pixels using the \"id\" column and the GeoDataFrame index of the\nvector features:\n\n::: {#b92da33c .cell execution_count=4}\n``` {.python .cell-code}\n# Join the extracted values with other columns from the training data\ndf_points[\"id\"] = training_pt[\"id\"].values\ndf_points = df_points.dropna()\ndf_points.head()\n\ndf_polygons = df_polygons.merge(\n right=training_py.loc[:, [\"label\", \"id\"]], \n left_on=\"geometry_idx\", \n right_on=\"index\",\n right_index=True\n)\n```\n:::\n\n\nIf the training data is from labelled pixels in a raster, then the extracted\ndata will contain a \"value\" column that contains the pixel labels:\n\n::: {#97c015a5 .cell execution_count=5}\n``` {.python .cell-code}\ndf_raster = stack.extract_raster(training_px)\n```\n:::\n\n\n## Model Training\n\nNext we can train a logistic regression classifier:\n\n::: {#efc89d56 .cell execution_count=6}\n``` {.python .cell-code}\nfrom sklearn.linear_model import LogisticRegressionCV\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.model_selection import cross_validate\n\n# define the classifier with standardization of the input features in a\n# pipeline\nlr = Pipeline(\n [('scaling', StandardScaler()),\n ('classifier', LogisticRegressionCV(n_jobs=-1))])\n\n# remove NaNs from training data\ndf_polygons = df_polygons.dropna()\n\n# fit the classifier\nX = df_polygons.drop(columns=[\"id\", \"label\", \"geometry\"]).values\ny = df_polygons[\"id\"].values\nlr.fit(X, y)\n```\n\n::: {.cell-output .cell-output-display execution_count=6}\n```{=html}\n
Pipeline(steps=[('scaling', StandardScaler()),\n                ('classifier', LogisticRegressionCV(n_jobs=-1))])
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
\n```\n:::\n:::\n\n\nAfter defining a classifier, a typical step consists of performing a\ncross-validation to evaluate the performance of the model. Scikit-learn\nprovides the cross_validate function for this purpose. In comparison to\nnon-spatial data, spatial data can be spatially correlated, which potentially\ncan mean that geographically proximal samples may not represent truely\nindependent samples if they are within the autocorrelation range of some of the\npredictors. This will lead to overly optimistic performance measures if samples\nin the training dataset / cross-validation partition are strongly spatially\ncorrelated with samples in the test dataset / cross-validation partition.\n\nIn this case, performing cross-validation using groups is useful, because these\ngroups can represent spatial clusters of training samples, and samples from the\nsame group will never occur in both the training and test partitions of a\ncross-validation. Here we can use the polygon indices as the groups, i.e.\npixels within the same polygon will not be split into training and test\npartitions:\n\n::: {#2e5891ba .cell execution_count=7}\n``` {.python .cell-code}\nscores = cross_validate(\n estimator=lr,\n X=X,\n y=y,\n groups=df_polygons.index.droplevel(\"pixel_idx\"),\n scoring=\"accuracy\",\n cv=3,\n n_jobs=1,\n)\nnp.round(scores['test_score'].mean(), 2)\n```\n\n::: {.cell-output .cell-output-display execution_count=7}\n```\n0.75\n```\n:::\n:::\n\n\n## Raster Prediction\n\nPrediction on the Raster object is performed using the `predict` method.\nThe `estimator` is the only required argument. 
If the `file_path` argument\nis not specified then the result is automatically written to a temporary file.\nThe predict method returns an rasterio.io.DatasetReader object which is open.\n\n::: {#499621f4 .cell execution_count=8}\n``` {.python .cell-code}\n# prediction\nresult = stack.predict(estimator=lr, dtype='int16', nodata=0)\nresult_probs = stack.predict_proba(estimator=lr)\n\n# plot classification result\nresult.iloc[0].cmap = \"Dark2\"\nresult.iloc[0].categorical = True\n\nresult.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](landcover_files/figure-html/cell-9-output-1.png){width=552 height=427}\n:::\n:::\n\n\nThe `predict_proba` method can be used to output class probabilities as\na multi-band raster (a band for each class probability). In the latter case,\n`indexes` can also be supplied if you only want to output the probabilities\nfor a particular class, or list of classes, by supplying the indices of those\nclasses:\n\n::: {#40b27083 .cell execution_count=9}\n``` {.python .cell-code}\nresult_probs.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-stderr}\n```\n/Users/stevenpawley/Library/Caches/pypoetry/virtualenvs/pyspatialml-NqZ1tMUm-py3.11/lib/python3.11/site-packages/matplotlib/image.py:499: RuntimeWarning: overflow encountered in divide\n A_scaled /= ((a_max - a_min) / frac)\n```\n:::\n\n::: {.cell-output .cell-output-display}\n![](landcover_files/figure-html/cell-10-output-2.png){width=602 height=372}\n:::\n:::\n\n\n", - "supporting": [ - "landcover_files" - ], - "filters": [], - "includes": { - "include-in-header": [ - "\n\n\n" - ] - } - } +{ + "hash": "c266ff597585baa54084adb9bc9f1dd5", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: Landcover classification\nformat:\n html:\n code-fold: false\n---\n\nLandcover classification is a common task in remote sensing. 
This example\ndemonstrates how to extract training data from a raster and vector data, train\na classifier, and predict landcover classes on a raster.\n\n## Data\n\nThe data used in this example is from the Landsat 7 ETM+ sensor, and represents\nan extract of data derived from the GRASS GIS North Carolina example dataset. \nThe data consists of 6 bands (1, 2, 3, 4, 5, 7) and labelled pixels. The labelled \npixels are used as training data for the classifier. The data is stored in the\n`pyspatialml.datasets` module.\n\n## Extraction Training Data\n\nLoad some training data in the form of polygons, points and labelled pixels in\n``geopandas.GeoDataFrame`` objects. We will also generate some line geometries\nby converting the polygon boundaries into linestrings. All of these geometry\ntypes can be used to spatially query pixel values in a Raster object, however\neach GeoDataFrame must contain only one type of geometry (i.e. either shapely\npoints, polygons or linestrings).\n\n::: {#5384a314 .cell execution_count=1}\n``` {.python .cell-code}\nfrom pyspatialml import Raster\nfrom pyspatialml.datasets import nc\nfrom copy import deepcopy\nimport os\nimport numpy as np\nimport tempfile\nimport geopandas\nimport rasterio.plot\nimport matplotlib.pyplot as plt\n\ntraining_py = geopandas.read_file(nc.polygons)\ntraining_pt = geopandas.read_file(nc.points)\ntraining_px = rasterio.open(nc.labelled_pixels)\ntraining_lines = deepcopy(training_py)\ntraining_lines['geometry'] = training_lines.geometry.boundary\n```\n:::\n\n\nShow training data points and a single raster band using numpy and matplotlib:\n\n::: {#031c229e .cell execution_count=2}\n``` {.python .cell-code}\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\nstack = Raster(predictors)\n\nfig, ax = plt.subplots(figsize=(9, 9))\nstack.lsat7_2000_70.plot(ax=ax)\n\ntraining_py.plot(column=\"label\", ax=ax, legend=True)\nplt.show()\n```\n\n::: {.cell-output 
.cell-output-display}\n![](landcover_files/figure-html/cell-3-output-1.png){width=752 height=694}\n:::\n:::\n\n\nPixel values in the Raster object can be spatially queried using the\n`extract_vector` and `extract_raster` methods. In addition, the\n`extract_xy` method can be used to query pixel values using a 2d array of x\nand y coordinates.\n\nThe `extract_vector` method accepts a Geopandas GeoDataFrame as the\n`gdf` argument. For GeoDataFrames containing shapely point geometries, the\nclosest pixel to each point is sampled. For shapely polygon geometries, all\npixels whose centres are inside the polygon are sampled. For shapely\nlinestring geometries, every pixel touched by the line is sampled. For all\ngeometry types, pixel values are queries for each geometry separately. This\nmeans that overlapping polygons or points that fall within the same pixel with\ncause the same pixel to be sampled multiple times.\n\nBy default, the extract functions return a Geopandas GeoDataFrame of point\ngeometries and the DataFrame containing the extracted pixels, with the column\nnames set by the names of the raster datasets in the Raster object. The user\ncan also use the `return_array=True` argument, which instead of returning a\nDataFrame will return three masked numpy arrays (ids, X, xy) containing the\ngeodataframe index positions, extracted pixel values, and the spatial\ncoordinates of the sampled pixels. These arrays are masked arrays.\n\nThe `extract_raster` method can also be used to spatially query pixel values\nfrom a Raster object using another raster containing labelled pixels. This\nraster has to be spatially aligned with the Raster object. 
The values of the\nlabelled pixels are returned along with the queried pixel values.\n\n::: {#82546144 .cell execution_count=3}\n``` {.python .cell-code}\n# Extract data from rasters at the training point locations:\ndf_points = stack.extract_vector(training_pt)\ndf_polygons = stack.extract_vector(training_py)\ndf_lines = stack.extract_vector(training_lines)\n```\n:::\n\n\nFor any vector features, a GeoDataFrame is returned containing the extracted\npixel values. A pandas.MultiIndex is used to relate the pixels back to the\noriginal geometries, with the `pixel_idx` index referring to the index of each\npixel, and the `geometry_idx` referring to the index of the original geometry\nin the supplied GeoDataFrame. The pixel values themselves are represented as\n`shapely.geometry.Point` objects. These will need to be joined back with the\ncolumns of the vector features to get the labelled classes. Here we will join\nthe extracted pixels using the \"id\" column and the GeoDataFrame index of the\nvector features:\n\n::: {#b92da33c .cell execution_count=4}\n``` {.python .cell-code}\n# Join the extracted values with other columns from the training data\ndf_points[\"id\"] = training_pt[\"id\"].values\ndf_points = df_points.dropna()\ndf_points.head()\n\ndf_polygons = df_polygons.merge(\n right=training_py.loc[:, [\"label\", \"id\"]], \n left_on=\"geometry_idx\", \n right_on=\"index\",\n right_index=True\n)\n```\n:::\n\n\nIf the training data is from labelled pixels in a raster, then the extracted\ndata will contain a \"value\" column that contains the pixel labels:\n\n::: {#97c015a5 .cell execution_count=5}\n``` {.python .cell-code}\ndf_raster = stack.extract_raster(training_px)\n```\n:::\n\n\n## Model Training\n\nNext we can train a logistic regression classifier:\n\n::: {#efc89d56 .cell execution_count=6}\n``` {.python .cell-code}\nfrom sklearn.linear_model import LogisticRegressionCV\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.pipeline import 
Pipeline\nfrom sklearn.model_selection import cross_validate\n\n# define the classifier with standardization of the input features in a\n# pipeline\nlr = Pipeline(\n [('scaling', StandardScaler()),\n ('classifier', LogisticRegressionCV(n_jobs=-1))])\n\n# remove NaNs from training data\ndf_polygons = df_polygons.dropna()\n\n# fit the classifier\nX = df_polygons.drop(columns=[\"id\", \"label\", \"geometry\"]).values\ny = df_polygons[\"id\"].values\nlr.fit(X, y)\n```\n\n::: {.cell-output .cell-output-display execution_count=6}\n```{=html}\n
Pipeline(steps=[('scaling', StandardScaler()),\n                ('classifier', LogisticRegressionCV(n_jobs=-1))])
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
\n```\n:::\n:::\n\n\nAfter defining a classifier, a typical step consists of performing a\ncross-validation to evaluate the performance of the model. Scikit-learn\nprovides the cross_validate function for this purpose. In comparison to\nnon-spatial data, spatial data can be spatially correlated, which potentially\ncan mean that geographically proximal samples may not represent truely\nindependent samples if they are within the autocorrelation range of some of the\npredictors. This will lead to overly optimistic performance measures if samples\nin the training dataset / cross-validation partition are strongly spatially\ncorrelated with samples in the test dataset / cross-validation partition.\n\nIn this case, performing cross-validation using groups is useful, because these\ngroups can represent spatial clusters of training samples, and samples from the\nsame group will never occur in both the training and test partitions of a\ncross-validation. Here we can use the polygon indices as the groups, i.e.\npixels within the same polygon will not be split into training and test\npartitions:\n\n::: {#2e5891ba .cell execution_count=7}\n``` {.python .cell-code}\nscores = cross_validate(\n estimator=lr,\n X=X,\n y=y,\n groups=df_polygons.index.droplevel(\"pixel_idx\"),\n scoring=\"accuracy\",\n cv=3,\n n_jobs=1,\n)\nnp.round(scores['test_score'].mean(), 2)\n```\n\n::: {.cell-output .cell-output-display execution_count=7}\n```\n0.75\n```\n:::\n:::\n\n\n## Raster Prediction\n\nPrediction on the Raster object is performed using the `predict` method.\nThe `estimator` is the only required argument. 
If the `file_path` argument\nis not specified then the result is automatically written to a temporary file.\nThe predict method returns an rasterio.io.DatasetReader object which is open.\n\n::: {#499621f4 .cell execution_count=8}\n``` {.python .cell-code}\n# prediction\nresult = stack.predict(estimator=lr, dtype='int16', nodata=0)\nresult_probs = stack.predict_proba(estimator=lr)\n\n# plot classification result\nresult.iloc[0].cmap = \"Dark2\"\nresult.iloc[0].categorical = True\n\nresult.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](landcover_files/figure-html/cell-9-output-1.png){width=552 height=427}\n:::\n:::\n\n\nThe `predict_proba` method can be used to output class probabilities as\na multi-band raster (a band for each class probability). In the latter case,\n`indexes` can also be supplied if you only want to output the probabilities\nfor a particular class, or list of classes, by supplying the indices of those\nclasses:\n\n::: {#40b27083 .cell execution_count=9}\n``` {.python .cell-code}\nresult_probs.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-stderr}\n```\n/Users/stevenpawley/Library/Caches/pypoetry/virtualenvs/pyspatialml-NqZ1tMUm-py3.11/lib/python3.11/site-packages/matplotlib/image.py:499: RuntimeWarning: overflow encountered in divide\n A_scaled /= ((a_max - a_min) / frac)\n```\n:::\n\n::: {.cell-output .cell-output-display}\n![](landcover_files/figure-html/cell-10-output-2.png){width=602 height=372}\n:::\n:::\n\n\n", + "supporting": [ + "landcover_files" + ], + "filters": [], + "includes": { + "include-in-header": [ + "\n\n\n" + ] + } + } } \ No newline at end of file diff --git a/_freeze/docs/multitarget-regression-soil-properties/execute-results/html.json b/_freeze/docs/multitarget-regression-soil-properties/execute-results/html.json index 8361b39..dfbb1ee 100644 --- a/_freeze/docs/multitarget-regression-soil-properties/execute-results/html.json +++ 
b/_freeze/docs/multitarget-regression-soil-properties/execute-results/html.json @@ -1,16 +1,16 @@ -{ - "hash": "308efad333d88256311ff71d4af11322", - "result": { - "engine": "jupyter", - "markdown": "---\ntitle: Multi-Target Spatial Prediction using the Meuse Dataset\nformat:\n html:\n code-fold: false\n---\n\nHere we are using the meuse dataset which is included in the pyspatialml package as an example of performing a spatial model and prediction. We can access the datasets using the `pyspatialml.datasets` module:\n\n::: {#9f01e6dc .cell execution_count=1}\n``` {.python .cell-code}\nfrom copy import deepcopy\nfrom tempfile import NamedTemporaryFile\nimport geopandas as gpd\nimport numpy as np\nfrom pyspatialml import Raster\nfrom pyspatialml.preprocessing import xy_coordinates, distance_to_corners\nimport pyspatialml.datasets.meuse as ms\n\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import cm\n```\n:::\n\n\n::: {#745fc2a1 .cell execution_count=2}\n``` {.python .cell-code}\npredictor_files = ms.predictors\ntraining_pts_file = ms.meuse\n```\n:::\n\n\n::: {#099cf2fd .cell execution_count=3}\n``` {.python .cell-code}\nstack = Raster(predictor_files)\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=3}\n```\ndict_keys(['chnl_dist', 'dem', 'dist', 'ffreq', 'landimg2', 'landimg3', 'landimg4', 'mrvbf', 'rsp', 'slope', 'soil', 'twi'])\n```\n:::\n:::\n\n\nPyspatialml implements pandas-style indexing for `Raster` objects, using `Raster.loc` to index by the name of the raster, and `Raster.iloc` to select by index. This method also accepts slices. Label-based indexing is also provided directly by the __getattr_ magic method, i.e. 
`Raster[name]` or for multiple layers `Raster[(names)]`.\n\nFor example we can remove layers from Raster object using the `Raster.drop` method, or by subsetting the raster:\n\n::: {#c1e14c46 .cell execution_count=4}\n``` {.python .cell-code}\nstack.drop('ffreq')\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nRaster Object Containing 11 Layers\n attribute values\n0 names [chnl_dist, dem, dist, landimg2, landimg3, lan...\n1 files [/Users/stevenpawley/GitHub/Pyspatialml/pyspat...\n2 rows 104\n3 cols 78\n4 res (40.0, 40.0)\n5 nodatavals [-99999.0, -99999.0, -1.0, -1.0, -1.0, -1.0, -...\n```\n:::\n\n::: {.cell-output .cell-output-display execution_count=4}\n```\n\n```\n:::\n:::\n\n\nWe can store matplotlib cmaps as an attribute within each layer in the Raster:\n\n::: {#15a7b907 .cell execution_count=5}\n``` {.python .cell-code}\nstack.chnl_dist.cmap = 'RdBu'\nstack.dem.cmap = 'terrain'\nstack.dist.cmap = 'Reds'\nstack.landimg2.cmap = 'Greys'\nstack.landimg3.cmap = 'Greys'\nstack.landimg4.cmap = 'Greys'\nstack.landimg4.cmap = 'Greys'\nstack.mrvbf.cmap = 'jet'\nstack.rsp.cmap = 'gnuplot2'\nstack.slope.cmap = 'PuRd'\nstack.soil.cmap = 'Set2'\nstack.twi.cmap = 'coolwarm'\n```\n:::\n\n\nPlot the predictors in the Raster object as a raster matrix:\n\n::: {#f4a33f49 .cell execution_count=6}\n``` {.python .cell-code}\nmpl.style.use('seaborn-v0_8')\naxs = stack.plot(figsize=(9, 7))\nax = axs.flatten()[10]\nim = ax.images\nim[0].colorbar.set_ticks([1,2,3])\nax = axs.flatten()[8]\nax.tick_params(axis='x', labelrotation=65)\n\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-7-output-1.png){width=845 height=662}\n:::\n:::\n\n\n## Feature Engineering\n\nWe want the prediction results to be dependent on the spatial locations of the training data. 
So to include spatial information, coordinate grids can be generated and added to the Raster object:\n\n::: {#e1c12410 .cell execution_count=7}\n``` {.python .cell-code}\nxy_layer = xy_coordinates(\n layer=stack.iloc[0], \n file_path=NamedTemporaryFile(suffix=\".tif\").name\n)\n```\n:::\n\n\n::: {#009adcf9 .cell execution_count=8}\n``` {.python .cell-code}\nxy_layer = xy_coordinates(\n layer=stack.iloc[0], \n file_path=NamedTemporaryFile(suffix=\".tif\").name\n)\n\nedms = distance_to_corners(\n layer=stack.iloc[0], \n file_path=NamedTemporaryFile(suffix=\".tif\").name\n)\nedms.rename(\n {old: new for (old, new) in zip(edms.names, [\"tl\", \"tr\", \"bl\", \"br\", \"c\"])},\n in_place=True\n)\n\nedms.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-9-output-1.png){width=660 height=458}\n:::\n:::\n\n\nAppend them to the Raster object:\n\n::: {#77aa65b8 .cell execution_count=9}\n``` {.python .cell-code}\nstack = stack.append([xy_layer, edms])\n```\n:::\n\n\nPlot the new predictors:\n\n::: {#32cbc2d4 .cell execution_count=10}\n``` {.python .cell-code}\naxs = stack.plot(figsize=(9, 7))\nax = axs.flatten()[10]\nim = ax.images\nim[0].colorbar.set_ticks([1,2,3])\nax = axs.flatten()[8]\nax.tick_params(axis='x', labelrotation=65)\n\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-11-output-1.png){width=847 height=663}\n:::\n:::\n\n\nThe area that is filled by some of the grids is different. This doesn't matter for the prediction because pixels in the Raster object that include some NaNs in some of the layers will be removed. However, the plots could potentially be given a cleaner look. 
We can use the Raster.intersect method to fix this:\n\n::: {#76c21e07 .cell execution_count=11}\n``` {.python .cell-code}\nstack = stack.intersect()\n```\n:::\n\n\n::: {#91d55829 .cell execution_count=12}\n``` {.python .cell-code}\naxs = stack.plot(figsize=(9, 7))\nax = axs.flatten()[10]\nim = ax.images\nim[0].colorbar.set_ticks([1,2,3])\nax = axs.flatten()[8]\nax.tick_params(axis='x', labelrotation=65)\n\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-13-output-1.png){width=847 height=663}\n:::\n:::\n\n\n## Read the Meuse Dataset\n\n::: {#37936033 .cell execution_count=13}\n``` {.python .cell-code}\ntraining_pts = gpd.read_file(training_pts_file)\ntraining_pts.head()\n```\n\n::: {.cell-output .cell-output-display execution_count=13}\n```{=html}\n
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
cadmiumcopperleadzincelevdistomffreqsoillimelandusedist.mgeometry
011.785.0299.01022.07.9090.00135813.6111Ah50.0POINT (181072.000 333611.000)
18.681.0277.01141.06.9830.01222414.0111Ah30.0POINT (181025.000 333558.000)
26.568.0199.0640.07.8000.10302913.0111Ah150.0POINT (181165.000 333537.000)
32.681.0116.0257.07.6550.1900948.0120Ga270.0POINT (181298.000 333484.000)
42.848.0117.0269.07.4800.2770908.7120Ah380.0POINT (181307.000 333330.000)
\n
\n```\n:::\n:::\n\n\nPlot the training points:\n\n::: {#4a282046 .cell execution_count=14}\n``` {.python .cell-code}\nfrom mpl_toolkits.axes_grid1 import make_axes_locatable\n\nfig, axs = plt.subplots(2, 3, figsize=(8.5, 7))\n\nfor i, (ax, target) in enumerate(zip(axs.ravel(), ['cadmium', 'copper', 'lead', 'zinc', 'om'])):\n ax.set_title(target.title())\n divider = make_axes_locatable(ax)\n cax = divider.append_axes(\"right\", size=\"10%\", pad=0.05)\n training_pts.plot(column=target, legend=True, ax=ax, cax=cax, cmap='viridis')\n \n if i != 0:\n ax.set_yticklabels([])\n \n if i != 3:\n ax.set_xticklabels([])\n else:\n ax.tick_params(axis='x', labelrotation=65)\n \nfig.delaxes(axs.flatten()[i+1])\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-15-output-1.png){width=807 height=641}\n:::\n:::\n\n\n## Extract Raster Values at the Training Point Locations\n\nPixel values from a Raster object can be extracted using geometries within a geopandas.GeoDataFrame (points, lines, polygons) or by using labelled pixels from another raster with the same dimensions and crs.\n\nBy default the extracted values are returned as a geopandas.GeoDataFrame that contains the data and the coordinates of the pixels:\n\n::: {#e95a651d .cell execution_count=15}\n``` {.python .cell-code}\ntraining_df = stack.extract_vector(gdf=training_pts)\n\ntraining_df.index = training_df.index.get_level_values(\"geometry_idx\")\ntraining_df = training_df.merge(\n training_pts.loc[:, (\"lead\", \"cadmium\", \"copper\", \"zinc\", \"om\")], \n left_index=True, \n right_index=True\n) \n```\n:::\n\n\n::: {#2a20967c .cell execution_count=16}\n``` {.python .cell-code}\ntraining_df = training_df.dropna()\ntraining_df.head()\n```\n\n::: {.cell-output .cell-output-display execution_count=16}\n```{=html}\n
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
chnl_distdemdistffreqlandimg2landimg3landimg4mrvbfrspslope...trblbrcgeometryleadcadmiumcopperzincom
geometry_idx
00.0000003214.00.0013581.097.092.0192.03.523824e-060.0000001.423307...12.369317119.268608100.71743055.470715POINT (181072.000 333611.000)299.011.785.01022.013.6
179.8498543402.00.0122241.0160.0183.0183.09.879866e-060.0820851.286004...13.928389117.04699798.85848253.235325POINT (181025.000 333558.000)277.08.681.01141.014.0
20.0000003277.00.1030291.0178.0209.0179.01.340742e-030.0000000.674711...10.295630119.28118198.41239955.226807POINT (181165.000 333537.000)199.06.568.0640.013.0
3184.7431643563.00.1900941.0114.0135.0152.06.547428e-070.1923251.413479...8.485281120.20815397.18538756.035702POINT (181298.000 333484.000)116.02.681.0257.08.0
416.7685553406.00.2770901.0133.0154.0151.01.588824e-030.0166890.531276...11.661903117.00427293.19334452.801514POINT (181307.000 333330.000)117.02.848.0269.08.7
\n

5 rows × 25 columns

\n
\n```\n:::\n:::\n\n\n## Developing a Machine Learning Model\n\nHere we are going to create a machine learning pipeline that correctly handles categorical predictors via one-hot encoding:\n\n::: {#18f93158 .cell execution_count=17}\n``` {.python .cell-code}\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=17}\n```\ndict_keys(['chnl_dist', 'dem', 'dist', 'ffreq', 'landimg2', 'landimg3', 'landimg4', 'mrvbf', 'rsp', 'slope', 'soil', 'twi', 'x_coordinates', 'y_coordinates', 'tl', 'tr', 'bl', 'br', 'c'])\n```\n:::\n:::\n\n\n::: {#3ef2e2ba .cell execution_count=18}\n``` {.python .cell-code}\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.ensemble import ExtraTreesRegressor\nfrom sklearn.preprocessing import OneHotEncoder\nfrom sklearn.compose import ColumnTransformer\n\nsoil_idx = [i for i, name in enumerate(stack.names) if name == 'soil']\n\ntrans = ColumnTransformer([\n ('ohe', OneHotEncoder(categories='auto', handle_unknown='ignore'), soil_idx)\n ], remainder='passthrough')\n\net = ExtraTreesRegressor(n_estimators=500, n_jobs=-1, random_state=1234)\net = Pipeline([\n ('preproc', trans),\n ('regressor', et)])\n```\n:::\n\n\nNow we can separate our response and predictor variables and train the model:\n\n::: {#1761a1ea .cell execution_count=19}\n``` {.python .cell-code}\nX = training_df.loc[:, stack.names]\ny = training_df.loc[:, ['lead', 'cadmium', 'copper', 'zinc', 'om']]\net.fit(X, y)\n```\n\n::: {.cell-output .cell-output-display execution_count=19}\n```{=html}\n
Pipeline(steps=[('preproc',\n                 ColumnTransformer(remainder='passthrough',\n                                   transformers=[('ohe',\n                                                  OneHotEncoder(handle_unknown='ignore'),\n                                                  [10])])),\n                ('regressor',\n                 ExtraTreesRegressor(n_estimators=500, n_jobs=-1,\n                                     random_state=1234))])
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
\n```\n:::\n:::\n\n\nTo evaluate the performance of the model, we will use 10-fold cross validation:\n\n::: {#92dc1c25 .cell execution_count=20}\n``` {.python .cell-code}\nfrom sklearn.model_selection import cross_validate, KFold\n\nouter = KFold(n_splits=10, shuffle=True, random_state=1234)\nscores = cross_validate(et, X, y, scoring='neg_mean_squared_error', cv=10, n_jobs=1)\nrmse = np.sqrt(-scores['test_score']).mean()\n\nprint(\"Our RMSE score is {}\".format(rmse))\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nOur RMSE score is 105.19227221271413\n```\n:::\n:::\n\n\n## Feature Importances\n\n::: {#3de44b19 .cell execution_count=21}\n``` {.python .cell-code}\nohe_names = deepcopy(list(stack.names))\nohe_names.insert(soil_idx[0], 'soil1')\nohe_names.insert(soil_idx[0], 'soil2')\nohe_names = np.array(ohe_names)\n```\n:::\n\n\n::: {#dde4cc5c .cell execution_count=22}\n``` {.python .cell-code}\nmpl.style.use('ggplot')\n\nfimp = et.named_steps['regressor'].feature_importances_\n\nfig, ax = plt.subplots(figsize=(4, 6))\nax.barh(y=ohe_names[fimp.argsort()], width=fimp[fimp.argsort()])\nax.set_xlabel('Feature Importance Score')\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-23-output-1.png){width=420 height=504}\n:::\n:::\n\n\n## Prediction on the Raster object\n\n::: {#a177af7f .cell execution_count=23}\n``` {.python .cell-code}\npreds = stack.predict(et)\npreds.rename(\n {old: new for old, new in zip(preds.names, ['lead', 'cadmium', 'copper', 'zinc', 'om'])},\n in_place=True\n)\npreds.lead.cmap = 'rainbow'\npreds.cadmium.cmap = 'rainbow'\npreds.copper.cmap = 'rainbow'\npreds.zinc.cmap = 'rainbow'\npreds.om.cmap = 'rainbow'\n```\n\n::: {.cell-output .cell-output-stderr}\n```\n/Users/stevenpawley/Library/Caches/pypoetry/virtualenvs/pyspatialml-NqZ1tMUm-py3.11/lib/python3.11/site-packages/sklearn/base.py:493: UserWarning: X does not have valid feature names, but OneHotEncoder was fitted 
with feature names\n warnings.warn(\n/Users/stevenpawley/Library/Caches/pypoetry/virtualenvs/pyspatialml-NqZ1tMUm-py3.11/lib/python3.11/site-packages/sklearn/base.py:493: UserWarning: X does not have valid feature names, but OneHotEncoder was fitted with feature names\n warnings.warn(\n```\n:::\n:::\n\n\nPlot the results:\n\n::: {#efd9a537 .cell execution_count=24}\n``` {.python .cell-code}\npreds.plot(out_shape=(200, 200), title_fontsize=14, figsize=(10, 8))\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-25-output-1.png){width=822 height=634}\n:::\n:::\n\n\n", - "supporting": [ - "multitarget-regression-soil-properties_files/figure-html" - ], - "filters": [], - "includes": { - "include-in-header": [ - "\n\n\n" - ] - } - } +{ + "hash": "308efad333d88256311ff71d4af11322", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: Multi-Target Spatial Prediction using the Meuse Dataset\nformat:\n html:\n code-fold: false\n---\n\nHere we are using the meuse dataset which is included in the pyspatialml package as an example of performing a spatial model and prediction. 
We can access the datasets using the `pyspatialml.datasets` module:\n\n::: {#9f01e6dc .cell execution_count=1}\n``` {.python .cell-code}\nfrom copy import deepcopy\nfrom tempfile import NamedTemporaryFile\nimport geopandas as gpd\nimport numpy as np\nfrom pyspatialml import Raster\nfrom pyspatialml.preprocessing import xy_coordinates, distance_to_corners\nimport pyspatialml.datasets.meuse as ms\n\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom matplotlib import cm\n```\n:::\n\n\n::: {#745fc2a1 .cell execution_count=2}\n``` {.python .cell-code}\npredictor_files = ms.predictors\ntraining_pts_file = ms.meuse\n```\n:::\n\n\n::: {#099cf2fd .cell execution_count=3}\n``` {.python .cell-code}\nstack = Raster(predictor_files)\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=3}\n```\ndict_keys(['chnl_dist', 'dem', 'dist', 'ffreq', 'landimg2', 'landimg3', 'landimg4', 'mrvbf', 'rsp', 'slope', 'soil', 'twi'])\n```\n:::\n:::\n\n\nPyspatialml implements pandas-style indexing for `Raster` objects, using `Raster.loc` to index by the name of the raster, and `Raster.iloc` to select by index. This method also accepts slices. Label-based indexing is also provided directly by the __getattr_ magic method, i.e. 
`Raster[name]` or for multiple layers `Raster[(names)]`.\n\nFor example we can remove layers from Raster object using the `Raster.drop` method, or by subsetting the raster:\n\n::: {#c1e14c46 .cell execution_count=4}\n``` {.python .cell-code}\nstack.drop('ffreq')\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nRaster Object Containing 11 Layers\n attribute values\n0 names [chnl_dist, dem, dist, landimg2, landimg3, lan...\n1 files [/Users/stevenpawley/GitHub/Pyspatialml/pyspat...\n2 rows 104\n3 cols 78\n4 res (40.0, 40.0)\n5 nodatavals [-99999.0, -99999.0, -1.0, -1.0, -1.0, -1.0, -...\n```\n:::\n\n::: {.cell-output .cell-output-display execution_count=4}\n```\n\n```\n:::\n:::\n\n\nWe can store matplotlib cmaps as an attribute within each layer in the Raster:\n\n::: {#15a7b907 .cell execution_count=5}\n``` {.python .cell-code}\nstack.chnl_dist.cmap = 'RdBu'\nstack.dem.cmap = 'terrain'\nstack.dist.cmap = 'Reds'\nstack.landimg2.cmap = 'Greys'\nstack.landimg3.cmap = 'Greys'\nstack.landimg4.cmap = 'Greys'\nstack.landimg4.cmap = 'Greys'\nstack.mrvbf.cmap = 'jet'\nstack.rsp.cmap = 'gnuplot2'\nstack.slope.cmap = 'PuRd'\nstack.soil.cmap = 'Set2'\nstack.twi.cmap = 'coolwarm'\n```\n:::\n\n\nPlot the predictors in the Raster object as a raster matrix:\n\n::: {#f4a33f49 .cell execution_count=6}\n``` {.python .cell-code}\nmpl.style.use('seaborn-v0_8')\naxs = stack.plot(figsize=(9, 7))\nax = axs.flatten()[10]\nim = ax.images\nim[0].colorbar.set_ticks([1,2,3])\nax = axs.flatten()[8]\nax.tick_params(axis='x', labelrotation=65)\n\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-7-output-1.png){width=845 height=662}\n:::\n:::\n\n\n## Feature Engineering\n\nWe want the prediction results to be dependent on the spatial locations of the training data. 
So to include spatial information, coordinate grids can be generated and added to the Raster object:\n\n::: {#e1c12410 .cell execution_count=7}\n``` {.python .cell-code}\nxy_layer = xy_coordinates(\n layer=stack.iloc[0], \n file_path=NamedTemporaryFile(suffix=\".tif\").name\n)\n```\n:::\n\n\n::: {#009adcf9 .cell execution_count=8}\n``` {.python .cell-code}\nxy_layer = xy_coordinates(\n layer=stack.iloc[0], \n file_path=NamedTemporaryFile(suffix=\".tif\").name\n)\n\nedms = distance_to_corners(\n layer=stack.iloc[0], \n file_path=NamedTemporaryFile(suffix=\".tif\").name\n)\nedms.rename(\n {old: new for (old, new) in zip(edms.names, [\"tl\", \"tr\", \"bl\", \"br\", \"c\"])},\n in_place=True\n)\n\nedms.plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-9-output-1.png){width=660 height=458}\n:::\n:::\n\n\nAppend them to the Raster object:\n\n::: {#77aa65b8 .cell execution_count=9}\n``` {.python .cell-code}\nstack = stack.append([xy_layer, edms])\n```\n:::\n\n\nPlot the new predictors:\n\n::: {#32cbc2d4 .cell execution_count=10}\n``` {.python .cell-code}\naxs = stack.plot(figsize=(9, 7))\nax = axs.flatten()[10]\nim = ax.images\nim[0].colorbar.set_ticks([1,2,3])\nax = axs.flatten()[8]\nax.tick_params(axis='x', labelrotation=65)\n\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-11-output-1.png){width=847 height=663}\n:::\n:::\n\n\nThe area that is filled by some of the grids is different. This doesn't matter for the prediction because pixels in the Raster object that include some NaNs in some of the layers will be removed. However, the plots could potentially be given a cleaner look. 
We can use the Raster.intersect method to fix this:\n\n::: {#76c21e07 .cell execution_count=11}\n``` {.python .cell-code}\nstack = stack.intersect()\n```\n:::\n\n\n::: {#91d55829 .cell execution_count=12}\n``` {.python .cell-code}\naxs = stack.plot(figsize=(9, 7))\nax = axs.flatten()[10]\nim = ax.images\nim[0].colorbar.set_ticks([1,2,3])\nax = axs.flatten()[8]\nax.tick_params(axis='x', labelrotation=65)\n\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-13-output-1.png){width=847 height=663}\n:::\n:::\n\n\n## Read the Meuse Dataset\n\n::: {#37936033 .cell execution_count=13}\n``` {.python .cell-code}\ntraining_pts = gpd.read_file(training_pts_file)\ntraining_pts.head()\n```\n\n::: {.cell-output .cell-output-display execution_count=13}\n```{=html}\n
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
cadmiumcopperleadzincelevdistomffreqsoillimelandusedist.mgeometry
011.785.0299.01022.07.9090.00135813.6111Ah50.0POINT (181072.000 333611.000)
18.681.0277.01141.06.9830.01222414.0111Ah30.0POINT (181025.000 333558.000)
26.568.0199.0640.07.8000.10302913.0111Ah150.0POINT (181165.000 333537.000)
32.681.0116.0257.07.6550.1900948.0120Ga270.0POINT (181298.000 333484.000)
42.848.0117.0269.07.4800.2770908.7120Ah380.0POINT (181307.000 333330.000)
\n
\n```\n:::\n:::\n\n\nPlot the training points:\n\n::: {#4a282046 .cell execution_count=14}\n``` {.python .cell-code}\nfrom mpl_toolkits.axes_grid1 import make_axes_locatable\n\nfig, axs = plt.subplots(2, 3, figsize=(8.5, 7))\n\nfor i, (ax, target) in enumerate(zip(axs.ravel(), ['cadmium', 'copper', 'lead', 'zinc', 'om'])):\n ax.set_title(target.title())\n divider = make_axes_locatable(ax)\n cax = divider.append_axes(\"right\", size=\"10%\", pad=0.05)\n training_pts.plot(column=target, legend=True, ax=ax, cax=cax, cmap='viridis')\n \n if i != 0:\n ax.set_yticklabels([])\n \n if i != 3:\n ax.set_xticklabels([])\n else:\n ax.tick_params(axis='x', labelrotation=65)\n \nfig.delaxes(axs.flatten()[i+1])\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-15-output-1.png){width=807 height=641}\n:::\n:::\n\n\n## Extract Raster Values at the Training Point Locations\n\nPixel values from a Raster object can be extracted using geometries within a geopandas.GeoDataFrame (points, lines, polygons) or by using labelled pixels from another raster with the same dimensions and crs.\n\nBy default the extracted values are returned as a geopandas.GeoDataFrame that contains the data and the coordinates of the pixels:\n\n::: {#e95a651d .cell execution_count=15}\n``` {.python .cell-code}\ntraining_df = stack.extract_vector(gdf=training_pts)\n\ntraining_df.index = training_df.index.get_level_values(\"geometry_idx\")\ntraining_df = training_df.merge(\n training_pts.loc[:, (\"lead\", \"cadmium\", \"copper\", \"zinc\", \"om\")], \n left_index=True, \n right_index=True\n) \n```\n:::\n\n\n::: {#2a20967c .cell execution_count=16}\n``` {.python .cell-code}\ntraining_df = training_df.dropna()\ntraining_df.head()\n```\n\n::: {.cell-output .cell-output-display execution_count=16}\n```{=html}\n
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
chnl_distdemdistffreqlandimg2landimg3landimg4mrvbfrspslope...trblbrcgeometryleadcadmiumcopperzincom
geometry_idx
00.0000003214.00.0013581.097.092.0192.03.523824e-060.0000001.423307...12.369317119.268608100.71743055.470715POINT (181072.000 333611.000)299.011.785.01022.013.6
179.8498543402.00.0122241.0160.0183.0183.09.879866e-060.0820851.286004...13.928389117.04699798.85848253.235325POINT (181025.000 333558.000)277.08.681.01141.014.0
20.0000003277.00.1030291.0178.0209.0179.01.340742e-030.0000000.674711...10.295630119.28118198.41239955.226807POINT (181165.000 333537.000)199.06.568.0640.013.0
3184.7431643563.00.1900941.0114.0135.0152.06.547428e-070.1923251.413479...8.485281120.20815397.18538756.035702POINT (181298.000 333484.000)116.02.681.0257.08.0
416.7685553406.00.2770901.0133.0154.0151.01.588824e-030.0166890.531276...11.661903117.00427293.19334452.801514POINT (181307.000 333330.000)117.02.848.0269.08.7
\n

5 rows × 25 columns

\n
\n```\n:::\n:::\n\n\n## Developing a Machine Learning Model\n\nHere we are going to create a machine learning pipeline that correctly handles categorical predictors via one-hot encoding:\n\n::: {#18f93158 .cell execution_count=17}\n``` {.python .cell-code}\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=17}\n```\ndict_keys(['chnl_dist', 'dem', 'dist', 'ffreq', 'landimg2', 'landimg3', 'landimg4', 'mrvbf', 'rsp', 'slope', 'soil', 'twi', 'x_coordinates', 'y_coordinates', 'tl', 'tr', 'bl', 'br', 'c'])\n```\n:::\n:::\n\n\n::: {#3ef2e2ba .cell execution_count=18}\n``` {.python .cell-code}\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.ensemble import ExtraTreesRegressor\nfrom sklearn.preprocessing import OneHotEncoder\nfrom sklearn.compose import ColumnTransformer\n\nsoil_idx = [i for i, name in enumerate(stack.names) if name == 'soil']\n\ntrans = ColumnTransformer([\n ('ohe', OneHotEncoder(categories='auto', handle_unknown='ignore'), soil_idx)\n ], remainder='passthrough')\n\net = ExtraTreesRegressor(n_estimators=500, n_jobs=-1, random_state=1234)\net = Pipeline([\n ('preproc', trans),\n ('regressor', et)])\n```\n:::\n\n\nNow we can separate our response and predictor variables and train the model:\n\n::: {#1761a1ea .cell execution_count=19}\n``` {.python .cell-code}\nX = training_df.loc[:, stack.names]\ny = training_df.loc[:, ['lead', 'cadmium', 'copper', 'zinc', 'om']]\net.fit(X, y)\n```\n\n::: {.cell-output .cell-output-display execution_count=19}\n```{=html}\n
Pipeline(steps=[('preproc',\n                 ColumnTransformer(remainder='passthrough',\n                                   transformers=[('ohe',\n                                                  OneHotEncoder(handle_unknown='ignore'),\n                                                  [10])])),\n                ('regressor',\n                 ExtraTreesRegressor(n_estimators=500, n_jobs=-1,\n                                     random_state=1234))])
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
\n```\n:::\n:::\n\n\nTo evaluate the performance of the model, we will use 10-fold cross validation:\n\n::: {#92dc1c25 .cell execution_count=20}\n``` {.python .cell-code}\nfrom sklearn.model_selection import cross_validate, KFold\n\nouter = KFold(n_splits=10, shuffle=True, random_state=1234)\nscores = cross_validate(et, X, y, scoring='neg_mean_squared_error', cv=10, n_jobs=1)\nrmse = np.sqrt(-scores['test_score']).mean()\n\nprint(\"Our RMSE score is {}\".format(rmse))\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nOur RMSE score is 105.19227221271413\n```\n:::\n:::\n\n\n## Feature Importances\n\n::: {#3de44b19 .cell execution_count=21}\n``` {.python .cell-code}\nohe_names = deepcopy(list(stack.names))\nohe_names.insert(soil_idx[0], 'soil1')\nohe_names.insert(soil_idx[0], 'soil2')\nohe_names = np.array(ohe_names)\n```\n:::\n\n\n::: {#dde4cc5c .cell execution_count=22}\n``` {.python .cell-code}\nmpl.style.use('ggplot')\n\nfimp = et.named_steps['regressor'].feature_importances_\n\nfig, ax = plt.subplots(figsize=(4, 6))\nax.barh(y=ohe_names[fimp.argsort()], width=fimp[fimp.argsort()])\nax.set_xlabel('Feature Importance Score')\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-23-output-1.png){width=420 height=504}\n:::\n:::\n\n\n## Prediction on the Raster object\n\n::: {#a177af7f .cell execution_count=23}\n``` {.python .cell-code}\npreds = stack.predict(et)\npreds.rename(\n {old: new for old, new in zip(preds.names, ['lead', 'cadmium', 'copper', 'zinc', 'om'])},\n in_place=True\n)\npreds.lead.cmap = 'rainbow'\npreds.cadmium.cmap = 'rainbow'\npreds.copper.cmap = 'rainbow'\npreds.zinc.cmap = 'rainbow'\npreds.om.cmap = 'rainbow'\n```\n\n::: {.cell-output .cell-output-stderr}\n```\n/Users/stevenpawley/Library/Caches/pypoetry/virtualenvs/pyspatialml-NqZ1tMUm-py3.11/lib/python3.11/site-packages/sklearn/base.py:493: UserWarning: X does not have valid feature names, but OneHotEncoder was fitted 
with feature names\n warnings.warn(\n/Users/stevenpawley/Library/Caches/pypoetry/virtualenvs/pyspatialml-NqZ1tMUm-py3.11/lib/python3.11/site-packages/sklearn/base.py:493: UserWarning: X does not have valid feature names, but OneHotEncoder was fitted with feature names\n warnings.warn(\n```\n:::\n:::\n\n\nPlot the results:\n\n::: {#efd9a537 .cell execution_count=24}\n``` {.python .cell-code}\npreds.plot(out_shape=(200, 200), title_fontsize=14, figsize=(10, 8))\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](multitarget-regression-soil-properties_files/figure-html/cell-25-output-1.png){width=822 height=634}\n:::\n:::\n\n\n", + "supporting": [ + "multitarget-regression-soil-properties_files/figure-html" + ], + "filters": [], + "includes": { + "include-in-header": [ + "\n\n\n" + ] + } + } } \ No newline at end of file diff --git a/_freeze/docs/plotting/execute-results/html.json b/_freeze/docs/plotting/execute-results/html.json index 88499cb..223f175 100644 --- a/_freeze/docs/plotting/execute-results/html.json +++ b/_freeze/docs/plotting/execute-results/html.json @@ -1,12 +1,12 @@ -{ - "hash": "3ef826464cf0375a69bc32e5b2fd457c", - "result": { - "engine": "jupyter", - "markdown": "---\ntitle: Plotting\nformat:\n html:\n code-fold: false\n toc: true\n---\n\nBoth `Raster` and `RasterLayer` objects include basic plotting methods. 
The\nplot method for a `RasterLayer` object produces a single raster plot using the\n`matplotlib.pyplot.imshow` method.\n\nFor convenience, plot settings such as color ramps and stretches can also be\nset for each RasterLayer using the `RasterLayer.cmap` that support matplotlib\ncmap's, and the `RasterLayer.norm` attribute to associate a\n`matplotlib.colors.Normalize` stretch with each RasterLayer:\n\nTo plot a single RasterLayer:\n\n::: {#06613099 .cell execution_count=1}\n``` {.python .cell-code}\nfrom pyspatialml import Raster\nfrom pyspatialml.datasets import nc\nimport matplotlib.pyplot as plt\n\nstack = Raster([nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7])\n\n# set RasterLayer color table\nstack.lsat7_2000_10.cmap = \"plasma\"\n\n# plot a single layer using an existing axis\nfig, ax = plt.subplots()\nstack.lsat7_2000_10.plot(ax=ax)\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](plotting_files/figure-html/cell-2-output-1.png){width=499 height=413}\n:::\n:::\n\n\nFor RasterLayers that represent categorical data types, e.g. land cover, then\nthe `RasterLayer.categorical=True` attribute will cause the cmap to be\nconverted to a discrete scale.\n\nThe default plot method for a `Raster` object produces a raster-matrix plot of\nthe individual RasterLayers. 
By default this plot preserves the plotting\nattributes of the individual rasters:\n\nPlot all RasterLayers in a Raster object:\n\n::: {#ee72d541 .cell execution_count=2}\n``` {.python .cell-code}\nstack.lsat7_2000_10.cmap = \"Blues\"\nstack.lsat7_2000_20.cmap = \"Greens\"\nstack.lsat7_2000_30.cmap = \"Reds\"\nstack.lsat7_2000_40.cmap = \"RdPu\"\nstack.lsat7_2000_50.cmap = \"autumn\"\nstack.lsat7_2000_70.cmap = \"hot\"\n\nstack.plot(\n title_fontsize=8,\n label_fontsize=6,\n legend_fontsize=6,\n names=[\"B1\", \"B2\", \"B3\", \"B4\", \"B5\", \"B7\"],\n fig_kwds={\"figsize\": (8, 4)},\n subplots_kwds={\"wspace\": 0.3}\n)\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](plotting_files/figure-html/cell-3-output-1.png){width=679 height=342}\n:::\n:::\n\n\nThe `Raster.plot` method also provides `cmap` and `norm` arguments that can be\nused to override the settings of the individual RasterLayers. Additional\nsettings can be passed to control plot layout using the `figure_kwds`,\n`legend_kwds` and `subplots_kwds` arguments.\n\n", - "supporting": [ - "plotting_files" - ], - "filters": [], - "includes": {} - } +{ + "hash": "3ef826464cf0375a69bc32e5b2fd457c", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: Plotting\nformat:\n html:\n code-fold: false\n toc: true\n---\n\nBoth `Raster` and `RasterLayer` objects include basic plotting methods. 
The\nplot method for a `RasterLayer` object produces a single raster plot using the\n`matplotlib.pyplot.imshow` method.\n\nFor convenience, plot settings such as color ramps and stretches can also be\nset for each RasterLayer using the `RasterLayer.cmap` that support matplotlib\ncmap's, and the `RasterLayer.norm` attribute to associate a\n`matplotlib.colors.Normalize` stretch with each RasterLayer:\n\nTo plot a single RasterLayer:\n\n::: {#06613099 .cell execution_count=1}\n``` {.python .cell-code}\nfrom pyspatialml import Raster\nfrom pyspatialml.datasets import nc\nimport matplotlib.pyplot as plt\n\nstack = Raster([nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7])\n\n# set RasterLayer color table\nstack.lsat7_2000_10.cmap = \"plasma\"\n\n# plot a single layer using an existing axis\nfig, ax = plt.subplots()\nstack.lsat7_2000_10.plot(ax=ax)\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](plotting_files/figure-html/cell-2-output-1.png){width=499 height=413}\n:::\n:::\n\n\nFor RasterLayers that represent categorical data types, e.g. land cover, then\nthe `RasterLayer.categorical=True` attribute will cause the cmap to be\nconverted to a discrete scale.\n\nThe default plot method for a `Raster` object produces a raster-matrix plot of\nthe individual RasterLayers. 
By default this plot preserves the plotting\nattributes of the individual rasters:\n\nPlot all RasterLayers in a Raster object:\n\n::: {#ee72d541 .cell execution_count=2}\n``` {.python .cell-code}\nstack.lsat7_2000_10.cmap = \"Blues\"\nstack.lsat7_2000_20.cmap = \"Greens\"\nstack.lsat7_2000_30.cmap = \"Reds\"\nstack.lsat7_2000_40.cmap = \"RdPu\"\nstack.lsat7_2000_50.cmap = \"autumn\"\nstack.lsat7_2000_70.cmap = \"hot\"\n\nstack.plot(\n title_fontsize=8,\n label_fontsize=6,\n legend_fontsize=6,\n names=[\"B1\", \"B2\", \"B3\", \"B4\", \"B5\", \"B7\"],\n fig_kwds={\"figsize\": (8, 4)},\n subplots_kwds={\"wspace\": 0.3}\n)\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](plotting_files/figure-html/cell-3-output-1.png){width=679 height=342}\n:::\n:::\n\n\nThe `Raster.plot` method also provides `cmap` and `norm` arguments that can be\nused to override the settings of the individual RasterLayers. Additional\nsettings can be passed to control plot layout using the `figure_kwds`,\n`legend_kwds` and `subplots_kwds` arguments.\n\n", + "supporting": [ + "plotting_files" + ], + "filters": [], + "includes": {} + } } \ No newline at end of file diff --git a/_freeze/docs/quickstart/execute-results/html.json b/_freeze/docs/quickstart/execute-results/html.json index 7c2fd54..029269b 100644 --- a/_freeze/docs/quickstart/execute-results/html.json +++ b/_freeze/docs/quickstart/execute-results/html.json @@ -1,16 +1,16 @@ -{ - "hash": "1ddec99dd2cca1a0e802081725fd4c4e", - "result": { - "engine": "jupyter", - "markdown": "---\ntitle: Quick start\nformat:\n html:\n code-fold: false\n toc: true\n---\n\n## Initiating a Raster Object\n\nWe are going to use a set of Landsat 7 bands contained within the nc example\ndata:\n\n::: {#4cb5727a .cell execution_count=1}\n``` {.python .cell-code}\nfrom pyspatialml import Raster\nimport pyspatialml.datasets.nc as nc\nimport matplotlib.pyplot as plt\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, 
nc.band7]\n```\n:::\n\n\nThese raster datasets are aligned in terms of their extent and coordinate\nreference systems. We can 'stack' these into a Raster class so that we can\nperform machine learning related operations on the set of rasters:\n\n::: {#22e619cd .cell execution_count=2}\n``` {.python .cell-code}\nstack = Raster(predictors)\n```\n:::\n\n\nWhen a Raster object is created, the names to each layer are automatically\ncreated based on syntactically-correct versions of the file basenames:\n\n::: {#df26959e .cell execution_count=3}\n``` {.python .cell-code}\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=3}\n```\ndict_keys(['lsat7_2000_10', 'lsat7_2000_20', 'lsat7_2000_30', 'lsat7_2000_40', 'lsat7_2000_50', 'lsat7_2000_70'])\n```\n:::\n:::\n\n\nColor ramps and matplotlib.colors.Normalize objects can be assigned to each\nRasterLayer in the object using the `cmap` and `norm` attributes for\nconvenient in plotting:\n\n::: {#1cd341e5 .cell execution_count=4}\n``` {.python .cell-code}\nstack.lsat7_2000_10.cmap = \"Blues\"\nstack.lsat7_2000_20.cmap = \"Greens\"\nstack.lsat7_2000_30.cmap = \"Reds\"\nstack.lsat7_2000_40.cmap = \"RdPu\"\nstack.lsat7_2000_50.cmap = \"autumn\"\nstack.lsat7_2000_70.cmap = \"hot\"\n\nstack.plot(\n title_fontsize=8,\n label_fontsize=6,\n legend_fontsize=6,\n names=[\"B1\", \"B2\", \"B3\", \"B4\", \"B5\", \"B7\"],\n fig_kwds={\"figsize\": (8, 4)},\n subplots_kwds={\"wspace\": 0.3}\n)\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](quickstart_files/figure-html/cell-5-output-1.png){width=679 height=342}\n:::\n:::\n\n\n## Subsetting and Indexing\n\nIndexing of Raster objects is provided by several methods:\n\nThe ``Raster[keys]`` method enables key-based indexing using a name of a\nRasterLayer, or a list of names. 
Direct subsetting of a Raster object instance\nreturns a RasterLayer if only a single label is used, otherwise it always\nreturns a new Raster object containing only the selected layers.\n\nThe ``Raster.iloc[int, list, tuple, slice]`` method allows a Raster object\ninstance to be subset using integer-based indexing or slicing. The ``iloc``\nmethod returns a RasterLayer object if only a single index is used, otherwise\nit always returns a new Raster object containing only the selected layers.\n\nSubsetting of a Raster object instance can also occur by using attribute names\nin the form of ``Raster.name_of_layer``. Because only a single RasterLayer can\nbe subset at one time using this approach, a RasterLayer object is always\nreturned.\n\nExamples of methods to subset a Raster object:\n\n::: {#fdbfb00a .cell execution_count=5}\n``` {.python .cell-code}\n# subset based on position\nsingle_layer = stack.iloc[0]\n\n# subset using a slice\nnew_raster_obj = stack.iloc[0:3]\n\n# subset using labels\nsingle_layer = stack['lsat7_2000_10']\nsingle_layer = stack.lsat7_2000_10\n\n# list or tuple of keys\nnew_raster_obj = stack[('lsat7_2000_10', 'lsat7_2000_20')]\n```\n:::\n\n\nIterate through RasterLayers individually:\n\n::: {#d38cee29 .cell execution_count=6}\n``` {.python .cell-code}\nfor name, layer in stack.items():\n print(name, layer)\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nlsat7_2000_10 \nlsat7_2000_20 \nlsat7_2000_30 \nlsat7_2000_40 \nlsat7_2000_50 \nlsat7_2000_70 \n```\n:::\n:::\n\n\nReplace a RasterLayer with another:\n\n::: {#a66d0bd5 .cell execution_count=7}\n``` {.python .cell-code}\nstack.iloc[0] = Raster(nc.band7).iloc[0]\n\nstack.iloc[0].plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](quickstart_files/figure-html/cell-8-output-1.png){width=499 height=413}\n:::\n:::\n\n\n## Appending and Dropping Layers\n\nAppend layers from another Raster to the stack. 
Duplicate names are\nautomatically given a suffix.\n\n::: {#9d580c66 .cell execution_count=8}\n``` {.python .cell-code}\nstack.append(Raster(nc.band7), in_place=True)\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=8}\n```\ndict_keys(['lsat7_2000_10', 'lsat7_2000_20', 'lsat7_2000_30', 'lsat7_2000_40', 'lsat7_2000_50', 'lsat7_2000_70_1', 'lsat7_2000_70_2'])\n```\n:::\n:::\n\n\nRename RasterLayers using a dict of old_name : new_name pairs:\n\n::: {#35b931db .cell execution_count=9}\n``` {.python .cell-code}\nstack.names\nstack.rename({'lsat7_2000_30': 'new_name'}, in_place=True)\nstack.names\nstack.new_name\nstack['new_name']\n```\n\n::: {.cell-output .cell-output-display execution_count=9}\n```\n\n```\n:::\n:::\n\n\nDrop a RasterLayer:\n\n::: {#7517a32e .cell execution_count=10}\n``` {.python .cell-code}\nstack.names\nstack.drop(labels='lsat7_2000_70_1', in_place=True)\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=10}\n```\ndict_keys(['lsat7_2000_10', 'lsat7_2000_20', 'new_name', 'lsat7_2000_40', 'lsat7_2000_50', 'lsat7_2000_70_2'])\n```\n:::\n:::\n\n\n## Integration with Pandas\n\nData from a Raster object can converted into a `Pandas.DataDrame`, with each\npixel representing by a row, and columns reflecting the x, y coordinates and\nthe values of each RasterLayer in the Raster object:\n\n::: {#62ad6b48 .cell execution_count=11}\n``` {.python .cell-code}\nimport pandas as pd\n\ndf = stack.to_pandas(max_pixels=50000, resampling='nearest')\ndf.head()\n```\n\n::: {.cell-output .cell-output-display execution_count=11}\n```{=html}\n
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
xylsat7_2000_10lsat7_2000_20new_namelsat7_2000_40lsat7_2000_50lsat7_2000_70_2
0630534.000000228114.0NaNNaNNaNNaNNaNNaN
1630562.558402228114.0NaNNaNNaNNaNNaNNaN
2630591.116803228114.0NaNNaNNaNNaNNaNNaN
3630619.675205228114.0NaNNaNNaNNaNNaNNaN
4630648.233607228114.0NaNNaNNaNNaNNaNNaN
\n
\n```\n:::\n:::\n\n\nThe original raster is up-sampled based on max_pixels and the resampling\nmethod, which uses all of resampling methods available in the underlying\nrasterio library for decimated reads.\n\n## Saving a Raster to File\n\nSave a Raster:\n\n::: {#a5671c9d .cell execution_count=12}\n``` {.python .cell-code}\nimport tempfile\n\ntmp_tif = tempfile.NamedTemporaryFile().name + '.tif'\nnewstack = stack.write(file_path=tmp_tif, nodata=-9999)\nnewstack.new_name.read()\nnewstack = None\n```\n:::\n\n\n", - "supporting": [ - "quickstart_files" - ], - "filters": [], - "includes": { - "include-in-header": [ - "\n\n\n" - ] - } - } +{ + "hash": "1ddec99dd2cca1a0e802081725fd4c4e", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: Quick start\nformat:\n html:\n code-fold: false\n toc: true\n---\n\n## Initiating a Raster Object\n\nWe are going to use a set of Landsat 7 bands contained within the nc example\ndata:\n\n::: {#4cb5727a .cell execution_count=1}\n``` {.python .cell-code}\nfrom pyspatialml import Raster\nimport pyspatialml.datasets.nc as nc\nimport matplotlib.pyplot as plt\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\n```\n:::\n\n\nThese raster datasets are aligned in terms of their extent and coordinate\nreference systems. 
We can 'stack' these into a Raster class so that we can\nperform machine learning related operations on the set of rasters:\n\n::: {#22e619cd .cell execution_count=2}\n``` {.python .cell-code}\nstack = Raster(predictors)\n```\n:::\n\n\nWhen a Raster object is created, the names to each layer are automatically\ncreated based on syntactically-correct versions of the file basenames:\n\n::: {#df26959e .cell execution_count=3}\n``` {.python .cell-code}\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=3}\n```\ndict_keys(['lsat7_2000_10', 'lsat7_2000_20', 'lsat7_2000_30', 'lsat7_2000_40', 'lsat7_2000_50', 'lsat7_2000_70'])\n```\n:::\n:::\n\n\nColor ramps and matplotlib.colors.Normalize objects can be assigned to each\nRasterLayer in the object using the `cmap` and `norm` attributes for\nconvenient in plotting:\n\n::: {#1cd341e5 .cell execution_count=4}\n``` {.python .cell-code}\nstack.lsat7_2000_10.cmap = \"Blues\"\nstack.lsat7_2000_20.cmap = \"Greens\"\nstack.lsat7_2000_30.cmap = \"Reds\"\nstack.lsat7_2000_40.cmap = \"RdPu\"\nstack.lsat7_2000_50.cmap = \"autumn\"\nstack.lsat7_2000_70.cmap = \"hot\"\n\nstack.plot(\n title_fontsize=8,\n label_fontsize=6,\n legend_fontsize=6,\n names=[\"B1\", \"B2\", \"B3\", \"B4\", \"B5\", \"B7\"],\n fig_kwds={\"figsize\": (8, 4)},\n subplots_kwds={\"wspace\": 0.3}\n)\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](quickstart_files/figure-html/cell-5-output-1.png){width=679 height=342}\n:::\n:::\n\n\n## Subsetting and Indexing\n\nIndexing of Raster objects is provided by several methods:\n\nThe ``Raster[keys]`` method enables key-based indexing using a name of a\nRasterLayer, or a list of names. 
Direct subsetting of a Raster object instance\nreturns a RasterLayer if only a single label is used, otherwise it always\nreturns a new Raster object containing only the selected layers.\n\nThe ``Raster.iloc[int, list, tuple, slice]`` method allows a Raster object\ninstance to be subset using integer-based indexing or slicing. The ``iloc``\nmethod returns a RasterLayer object if only a single index is used, otherwise\nit always returns a new Raster object containing only the selected layers.\n\nSubsetting of a Raster object instance can also occur by using attribute names\nin the form of ``Raster.name_of_layer``. Because only a single RasterLayer can\nbe subset at one time using this approach, a RasterLayer object is always\nreturned.\n\nExamples of methods to subset a Raster object:\n\n::: {#fdbfb00a .cell execution_count=5}\n``` {.python .cell-code}\n# subset based on position\nsingle_layer = stack.iloc[0]\n\n# subset using a slice\nnew_raster_obj = stack.iloc[0:3]\n\n# subset using labels\nsingle_layer = stack['lsat7_2000_10']\nsingle_layer = stack.lsat7_2000_10\n\n# list or tuple of keys\nnew_raster_obj = stack[('lsat7_2000_10', 'lsat7_2000_20')]\n```\n:::\n\n\nIterate through RasterLayers individually:\n\n::: {#d38cee29 .cell execution_count=6}\n``` {.python .cell-code}\nfor name, layer in stack.items():\n print(name, layer)\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nlsat7_2000_10 \nlsat7_2000_20 \nlsat7_2000_30 \nlsat7_2000_40 \nlsat7_2000_50 \nlsat7_2000_70 \n```\n:::\n:::\n\n\nReplace a RasterLayer with another:\n\n::: {#a66d0bd5 .cell execution_count=7}\n``` {.python .cell-code}\nstack.iloc[0] = Raster(nc.band7).iloc[0]\n\nstack.iloc[0].plot()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](quickstart_files/figure-html/cell-8-output-1.png){width=499 height=413}\n:::\n:::\n\n\n## Appending and Dropping Layers\n\nAppend layers from another Raster to the stack. 
Duplicate names are\nautomatically given a suffix.\n\n::: {#9d580c66 .cell execution_count=8}\n``` {.python .cell-code}\nstack.append(Raster(nc.band7), in_place=True)\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=8}\n```\ndict_keys(['lsat7_2000_10', 'lsat7_2000_20', 'lsat7_2000_30', 'lsat7_2000_40', 'lsat7_2000_50', 'lsat7_2000_70_1', 'lsat7_2000_70_2'])\n```\n:::\n:::\n\n\nRename RasterLayers using a dict of old_name : new_name pairs:\n\n::: {#35b931db .cell execution_count=9}\n``` {.python .cell-code}\nstack.names\nstack.rename({'lsat7_2000_30': 'new_name'}, in_place=True)\nstack.names\nstack.new_name\nstack['new_name']\n```\n\n::: {.cell-output .cell-output-display execution_count=9}\n```\n\n```\n:::\n:::\n\n\nDrop a RasterLayer:\n\n::: {#7517a32e .cell execution_count=10}\n``` {.python .cell-code}\nstack.names\nstack.drop(labels='lsat7_2000_70_1', in_place=True)\nstack.names\n```\n\n::: {.cell-output .cell-output-display execution_count=10}\n```\ndict_keys(['lsat7_2000_10', 'lsat7_2000_20', 'new_name', 'lsat7_2000_40', 'lsat7_2000_50', 'lsat7_2000_70_2'])\n```\n:::\n:::\n\n\n## Integration with Pandas\n\nData from a Raster object can converted into a `Pandas.DataDrame`, with each\npixel representing by a row, and columns reflecting the x, y coordinates and\nthe values of each RasterLayer in the Raster object:\n\n::: {#62ad6b48 .cell execution_count=11}\n``` {.python .cell-code}\nimport pandas as pd\n\ndf = stack.to_pandas(max_pixels=50000, resampling='nearest')\ndf.head()\n```\n\n::: {.cell-output .cell-output-display execution_count=11}\n```{=html}\n
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
xylsat7_2000_10lsat7_2000_20new_namelsat7_2000_40lsat7_2000_50lsat7_2000_70_2
0630534.000000228114.0NaNNaNNaNNaNNaNNaN
1630562.558402228114.0NaNNaNNaNNaNNaNNaN
2630591.116803228114.0NaNNaNNaNNaNNaNNaN
3630619.675205228114.0NaNNaNNaNNaNNaNNaN
4630648.233607228114.0NaNNaNNaNNaNNaNNaN
\n
\n```\n:::\n:::\n\n\nThe original raster is up-sampled based on max_pixels and the resampling\nmethod, which uses all of resampling methods available in the underlying\nrasterio library for decimated reads.\n\n## Saving a Raster to File\n\nSave a Raster:\n\n::: {#a5671c9d .cell execution_count=12}\n``` {.python .cell-code}\nimport tempfile\n\ntmp_tif = tempfile.NamedTemporaryFile().name + '.tif'\nnewstack = stack.write(file_path=tmp_tif, nodata=-9999)\nnewstack.new_name.read()\nnewstack = None\n```\n:::\n\n\n", + "supporting": [ + "quickstart_files" + ], + "filters": [], + "includes": { + "include-in-header": [ + "\n\n\n" + ] + } + } } \ No newline at end of file diff --git a/_freeze/docs/sampling/execute-results/html.json b/_freeze/docs/sampling/execute-results/html.json index 4a2a5e8..9b6ba1e 100644 --- a/_freeze/docs/sampling/execute-results/html.json +++ b/_freeze/docs/sampling/execute-results/html.json @@ -1,16 +1,16 @@ -{ - "hash": "f4b98349d93d08e9877aa37589816d4f", - "result": { - "engine": "jupyter", - "markdown": "---\ntitle: Random Sampling\nformat:\n html:\n code-fold: false\n toc: true\n---\n\n## Random Uniform Sampling\n\nFor many spatial models, it is common to take a random sample of the\npredictors to represent a single class (i.e. an environmental background or\npseudo-absences in a binary classification model). 
The sample function is\nsupplied in the sampling module for this purpose:\n\n::: {#a3807ffc .cell execution_count=1}\n``` {.python .cell-code}\nfrom pyspatialml import Raster\nimport pyspatialml.datasets.nc as nc\nimport matplotlib.pyplot as plt\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\nstack = Raster(predictors)\n\n# extract training data using a random sample\ndf_rand = stack.sample(size=1000, random_state=1)\ndf_rand.plot()\n```\n\n::: {.cell-output .cell-output-display}\n![](sampling_files/figure-html/cell-2-output-1.png){width=480 height=411}\n:::\n:::\n\n\n## Stratified Random Sampling\n\nThe sample function also enables stratified random sampling based on passing a\ncategorical raster dataset to the strata argument. The categorical raster\nshould spatially overlap with the dataset to be sampled, but it does not need\nto be of the same grid resolution. This raster should be passed as a opened\nrasterio dataset:\n\n::: {#0245d53c .cell execution_count=2}\n``` {.python .cell-code}\nstrata = Raster(nc.strata)\ndf_strata = stack.sample(size=5, strata=strata, random_state=1)\ndf_strata = df_strata.dropna()\ndf_strata\n```\n\n::: {.cell-output .cell-output-display execution_count=2}\n```{=html}\n
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
lsat7_2000_10lsat7_2000_20lsat7_2000_30lsat7_2000_40lsat7_2000_50lsat7_2000_70geometry
096.078.088.049.071.063.0POINT (641093.250 225135.750)
1113.0103.0122.066.0136.0110.0POINT (640979.250 222342.750)
382.066.067.064.076.052.0POINT (640095.750 225848.250)
499.088.095.056.098.078.0POINT (637559.250 226788.750)
581.069.076.073.0118.072.0POINT (635621.250 218324.250)
1091.078.081.077.097.073.0POINT (634709.250 221943.750)
1172.061.051.0104.091.047.0POINT (639269.250 220005.750)
1286.075.078.073.087.060.0POINT (639326.250 224964.750)
1371.053.048.059.078.046.0POINT (635222.250 218951.250)
1576.059.063.065.0114.064.0POINT (633027.750 218580.750)
1775.061.055.070.074.043.0POINT (633369.750 219435.750)
1878.066.069.069.0110.072.0POINT (633198.750 225506.250)
1968.052.040.079.058.030.0POINT (637986.750 222998.250)
2070.055.052.062.079.047.0POINT (635649.750 217440.750)
2271.053.048.064.077.042.0POINT (635564.250 222713.250)
2372.053.051.058.082.051.0POINT (633056.250 218324.250)
2681.078.079.034.041.028.0POINT (639297.750 223625.250)
2773.057.051.016.014.010.0POINT (635364.750 224736.750)
2873.057.052.055.057.040.0POINT (635535.750 223311.750)
30138.0120.0132.065.0129.0126.0POINT (634196.250 226190.250)
3172.060.047.069.082.046.0POINT (639810.750 219749.250)
32132.0122.0140.073.0171.0176.0POINT (640352.250 218238.750)
33170.0157.0176.080.0182.0183.0POINT (639924.750 219692.250)
34115.098.0106.060.0110.0102.0POINT (639953.250 219578.250)
\n
\n```\n:::\n:::\n\n\n", - "supporting": [ - "sampling_files" - ], - "filters": [], - "includes": { - "include-in-header": [ - "\n\n\n" - ] - } - } +{ + "hash": "f4b98349d93d08e9877aa37589816d4f", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: Random Sampling\nformat:\n html:\n code-fold: false\n toc: true\n---\n\n## Random Uniform Sampling\n\nFor many spatial models, it is common to take a random sample of the\npredictors to represent a single class (i.e. an environmental background or\npseudo-absences in a binary classification model). The sample function is\nsupplied in the sampling module for this purpose:\n\n::: {#a3807ffc .cell execution_count=1}\n``` {.python .cell-code}\nfrom pyspatialml import Raster\nimport pyspatialml.datasets.nc as nc\nimport matplotlib.pyplot as plt\n\npredictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]\nstack = Raster(predictors)\n\n# extract training data using a random sample\ndf_rand = stack.sample(size=1000, random_state=1)\ndf_rand.plot()\n```\n\n::: {.cell-output .cell-output-display}\n![](sampling_files/figure-html/cell-2-output-1.png){width=480 height=411}\n:::\n:::\n\n\n## Stratified Random Sampling\n\nThe sample function also enables stratified random sampling based on passing a\ncategorical raster dataset to the strata argument. The categorical raster\nshould spatially overlap with the dataset to be sampled, but it does not need\nto be of the same grid resolution. This raster should be passed as a opened\nrasterio dataset:\n\n::: {#0245d53c .cell execution_count=2}\n``` {.python .cell-code}\nstrata = Raster(nc.strata)\ndf_strata = stack.sample(size=5, strata=strata, random_state=1)\ndf_strata = df_strata.dropna()\ndf_strata\n```\n\n::: {.cell-output .cell-output-display execution_count=2}\n```{=html}\n
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
lsat7_2000_10lsat7_2000_20lsat7_2000_30lsat7_2000_40lsat7_2000_50lsat7_2000_70geometry
096.078.088.049.071.063.0POINT (641093.250 225135.750)
1113.0103.0122.066.0136.0110.0POINT (640979.250 222342.750)
382.066.067.064.076.052.0POINT (640095.750 225848.250)
499.088.095.056.098.078.0POINT (637559.250 226788.750)
581.069.076.073.0118.072.0POINT (635621.250 218324.250)
1091.078.081.077.097.073.0POINT (634709.250 221943.750)
1172.061.051.0104.091.047.0POINT (639269.250 220005.750)
1286.075.078.073.087.060.0POINT (639326.250 224964.750)
1371.053.048.059.078.046.0POINT (635222.250 218951.250)
1576.059.063.065.0114.064.0POINT (633027.750 218580.750)
1775.061.055.070.074.043.0POINT (633369.750 219435.750)
1878.066.069.069.0110.072.0POINT (633198.750 225506.250)
1968.052.040.079.058.030.0POINT (637986.750 222998.250)
2070.055.052.062.079.047.0POINT (635649.750 217440.750)
2271.053.048.064.077.042.0POINT (635564.250 222713.250)
2372.053.051.058.082.051.0POINT (633056.250 218324.250)
2681.078.079.034.041.028.0POINT (639297.750 223625.250)
2773.057.051.016.014.010.0POINT (635364.750 224736.750)
2873.057.052.055.057.040.0POINT (635535.750 223311.750)
30138.0120.0132.065.0129.0126.0POINT (634196.250 226190.250)
3172.060.047.069.082.046.0POINT (639810.750 219749.250)
32132.0122.0140.073.0171.0176.0POINT (640352.250 218238.750)
33170.0157.0176.080.0182.0183.0POINT (639924.750 219692.250)
34115.098.0106.060.0110.0102.0POINT (639953.250 219578.250)
\n
\n```\n:::\n:::\n\n\n", + "supporting": [ + "sampling_files" + ], + "filters": [], + "includes": { + "include-in-header": [ + "\n\n\n" + ] + } + } } \ No newline at end of file diff --git a/_freeze/docs/spatial-features/execute-results/html.json b/_freeze/docs/spatial-features/execute-results/html.json index fee68bf..2e97717 100644 --- a/_freeze/docs/spatial-features/execute-results/html.json +++ b/_freeze/docs/spatial-features/execute-results/html.json @@ -1,12 +1,12 @@ -{ - "hash": "fb14d013abd3b63ee2a6abfe3f924f31", - "result": { - "engine": "jupyter", - "markdown": "---\ntitle: Incorporating Spatial Autocorrelation into Spatial Predictions\nformat:\n html:\n code-fold: false\n---\n\nSimilarly to example 1, we are using the meuse dataset again to perform a multi-target prediction of soil properties using a regression model. However, in this case we will attempt to account for spatial autocorrelation in the model directly by generating new features that are based on the distance-weighted means of surrounding spatial locations.\n\n::: {#0bcf6e26 .cell execution_count=1}\n``` {.python .cell-code}\nimport geopandas as gpd\nimport numpy as np\nfrom tempfile import NamedTemporaryFile\nfrom pyspatialml import Raster\nimport pyspatialml.datasets.meuse as ms\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\n```\n:::\n\n\n## Preparing the Raster Predictors\n\nImport the raster predictors from the `pyspatialml.datasets.meuse` module:\n\n::: {#ebe6123b .cell execution_count=2}\n``` {.python .cell-code}\npredictor_files = ms.predictors\ntraining_pts_file = ms.meuse\nstack = Raster(predictor_files)\nstack.drop('ffreq')\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nRaster Object Containing 11 Layers\n attribute values\n0 names [chnl_dist, dem, dist, landimg2, landimg3, lan...\n1 files [/Users/stevenpawley/GitHub/Pyspatialml/pyspat...\n2 rows 104\n3 cols 78\n4 res (40.0, 40.0)\n5 nodatavals [-99999.0, -99999.0, -1.0, -1.0, -1.0, -1.0, -...\n```\n:::\n\n::: 
{.cell-output .cell-output-display execution_count=2}\n```\n\n```\n:::\n:::\n\n\nIn order to generate new features from surrounding spatial locations, we need their x,y coordinates, which will will add to the stack of the raster predictors using the `pyspatialml.preprocessing.xy_coordinates` function:\n\n::: {#0393e2ac .cell execution_count=3}\n``` {.python .cell-code}\nfrom pyspatialml.preprocessing import xy_coordinates\n\nxy_layers = xy_coordinates(stack.iloc[0], NamedTemporaryFile(suffix=\".tif\").name)\nstack = stack.append(xy_layers, in_place=False)\n```\n:::\n\n\nQuickly plot the raster predictors:\n\n::: {#cba259ea .cell execution_count=4}\n``` {.python .cell-code}\nmpl.style.use('seaborn-v0_8')\naxs = stack.plot(figsize=(9, 7))\nax = axs.flatten()[10]\nim = ax.images\nim[0].colorbar.set_ticks([1,2,3])\nax = axs.flatten()[8]\nax.tick_params(axis='x', labelrotation=65)\n\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](spatial-features_files/figure-html/cell-5-output-1.png){width=817 height=663}\n:::\n:::\n\n\n## Extract the Training Data\n\nSpatially query the raster predictors at the training point locations:\n\n::: {#06d01eb2 .cell execution_count=5}\n``` {.python .cell-code}\ntraining_pts = gpd.read_file(training_pts_file)\ntraining_df = stack.extract_vector(gdf=training_pts)\n\ntraining_df.index = training_df.index.get_level_values(\"geometry_idx\")\ntraining_df = training_df.merge(\n training_pts.loc[:, (\"lead\", \"cadmium\", \"copper\", \"zinc\", \"om\")], \n left_index=True, \n right_index=True\n) \ntraining_df = training_df.dropna()\n```\n:::\n\n\nSplit the response/target variables from the predictors:\n\n::: {#3138a3c4 .cell execution_count=6}\n``` {.python .cell-code}\nX = training_df.loc[:, stack.names].values\ny = training_df.loc[:, ['lead', 'cadmium', 'copper', 'zinc', 'om']].values\n```\n:::\n\n\n## Develop a Spatially-Lagged Machine Learning Model\n\nAs well as using the ExtraTreeRegressor model which was 
also used in example 1, here we will use the custom `pyspatialml.estimators.SpatialLagRegressor` metalearner class to wrap the extratrees regressor into a model that adds a new feature based on the distance-weighted mean of spatially-proximal observations:\n\n::: {#e50ffc54 .cell execution_count=7}\n``` {.python .cell-code}\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.ensemble import ExtraTreesRegressor\nfrom sklearn.preprocessing import OneHotEncoder\nfrom sklearn.compose import ColumnTransformer\nfrom pyspatialml.transformers import KNNTransformer\nfrom sklearn.model_selection import cross_validate, KFold\nfrom sklearn.model_selection import GridSearchCV\n\n# define regressor\net = ExtraTreesRegressor(n_estimators=500, n_jobs=-1, random_state=1234)\n\nsoil_index = list(stack.names).index(\"soil\")\nxy_indexes = [list(stack.names).index(i) for i in [\"x_coordinates\", \"y_coordinates\"]]\n\npreproc = ColumnTransformer([\n ('ohe', OneHotEncoder(categories='auto', handle_unknown='ignore'), [soil_index]),\n ('lags', KNNTransformer(weights='distance', measure=\"mean\"), xy_indexes)\n], remainder='passthrough')\n\nwflow = Pipeline([\n ('preproc', preproc),\n ('regressor', et)\n])\n\nsearch_grid = {\"preproc__lags__n_neighbors\": [3, 5, 7, 9]}\ninner = KFold(n_splits=3, shuffle=True, random_state=1234)\nmodel = GridSearchCV(wflow, param_grid=search_grid, cv=inner, scoring=\"r2\")\n```\n:::\n\n\nFit the model and cross-validate:\n\n::: {#5d0230a7 .cell execution_count=8}\n``` {.python .cell-code}\nmodel = model.fit(X, y)\nmodel.best_params_\n```\n\n::: {.cell-output .cell-output-display execution_count=8}\n```\n{'preproc__lags__n_neighbors': 9}\n```\n:::\n:::\n\n\n::: {#4a619dd2 .cell execution_count=9}\n``` {.python .cell-code}\nouter = KFold(n_splits=10, shuffle=True, random_state=1234)\n\nscores = cross_validate(model, X, y, scoring='neg_mean_squared_error', cv=outer, n_jobs=1)\nrmse = np.sqrt(-scores['test_score']).mean()\n\nprint(\"Our RMSE score is 
{}\".format(rmse))\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nOur RMSE score is 102.27495341202624\n```\n:::\n:::\n\n\nComparing the RMSE score the the score obtained in example 1, where the spatial structure of the training data was accounted for indirectly by added a variety of raster distance measures, we can see that the RMSE score is slightly improved.\n\n## Multi-Target Predictions\n\n::: {#ddc87c2a .cell execution_count=10}\n``` {.python .cell-code}\npreds = stack.predict(model)\npreds.rename(\n {old: new for old, new in zip(preds.names, ['lead', 'cadmium', 'copper', 'zinc', 'om'])},\n in_place=True\n)\npreds.lead.cmap = 'rainbow'\npreds.cadmium.cmap = 'rainbow'\npreds.copper.cmap = 'rainbow'\npreds.zinc.cmap = 'rainbow'\npreds.om.cmap = 'rainbow'\n```\n:::\n\n\n::: {#911ae7fb .cell execution_count=11}\n``` {.python .cell-code}\npreds.plot(out_shape=(200, 200), title_fontsize=14, figsize=(10, 8))\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](spatial-features_files/figure-html/cell-12-output-1.png){width=822 height=634}\n:::\n:::\n\n\n", - "supporting": [ - "spatial-features_files" - ], - "filters": [], - "includes": {} - } +{ + "hash": "fb14d013abd3b63ee2a6abfe3f924f31", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: Incorporating Spatial Autocorrelation into Spatial Predictions\nformat:\n html:\n code-fold: false\n---\n\nSimilarly to example 1, we are using the meuse dataset again to perform a multi-target prediction of soil properties using a regression model. 
However, in this case we will attempt to account for spatial autocorrelation in the model directly by generating new features that are based on the distance-weighted means of surrounding spatial locations.\n\n::: {#0bcf6e26 .cell execution_count=1}\n``` {.python .cell-code}\nimport geopandas as gpd\nimport numpy as np\nfrom tempfile import NamedTemporaryFile\nfrom pyspatialml import Raster\nimport pyspatialml.datasets.meuse as ms\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\n```\n:::\n\n\n## Preparing the Raster Predictors\n\nImport the raster predictors from the `pyspatialml.datasets.meuse` module:\n\n::: {#ebe6123b .cell execution_count=2}\n``` {.python .cell-code}\npredictor_files = ms.predictors\ntraining_pts_file = ms.meuse\nstack = Raster(predictor_files)\nstack.drop('ffreq')\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nRaster Object Containing 11 Layers\n attribute values\n0 names [chnl_dist, dem, dist, landimg2, landimg3, lan...\n1 files [/Users/stevenpawley/GitHub/Pyspatialml/pyspat...\n2 rows 104\n3 cols 78\n4 res (40.0, 40.0)\n5 nodatavals [-99999.0, -99999.0, -1.0, -1.0, -1.0, -1.0, -...\n```\n:::\n\n::: {.cell-output .cell-output-display execution_count=2}\n```\n\n```\n:::\n:::\n\n\nIn order to generate new features from surrounding spatial locations, we need their x,y coordinates, which will will add to the stack of the raster predictors using the `pyspatialml.preprocessing.xy_coordinates` function:\n\n::: {#0393e2ac .cell execution_count=3}\n``` {.python .cell-code}\nfrom pyspatialml.preprocessing import xy_coordinates\n\nxy_layers = xy_coordinates(stack.iloc[0], NamedTemporaryFile(suffix=\".tif\").name)\nstack = stack.append(xy_layers, in_place=False)\n```\n:::\n\n\nQuickly plot the raster predictors:\n\n::: {#cba259ea .cell execution_count=4}\n``` {.python .cell-code}\nmpl.style.use('seaborn-v0_8')\naxs = stack.plot(figsize=(9, 7))\nax = axs.flatten()[10]\nim = ax.images\nim[0].colorbar.set_ticks([1,2,3])\nax = 
axs.flatten()[8]\nax.tick_params(axis='x', labelrotation=65)\n\nplt.tight_layout()\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](spatial-features_files/figure-html/cell-5-output-1.png){width=817 height=663}\n:::\n:::\n\n\n## Extract the Training Data\n\nSpatially query the raster predictors at the training point locations:\n\n::: {#06d01eb2 .cell execution_count=5}\n``` {.python .cell-code}\ntraining_pts = gpd.read_file(training_pts_file)\ntraining_df = stack.extract_vector(gdf=training_pts)\n\ntraining_df.index = training_df.index.get_level_values(\"geometry_idx\")\ntraining_df = training_df.merge(\n training_pts.loc[:, (\"lead\", \"cadmium\", \"copper\", \"zinc\", \"om\")], \n left_index=True, \n right_index=True\n) \ntraining_df = training_df.dropna()\n```\n:::\n\n\nSplit the response/target variables from the predictors:\n\n::: {#3138a3c4 .cell execution_count=6}\n``` {.python .cell-code}\nX = training_df.loc[:, stack.names].values\ny = training_df.loc[:, ['lead', 'cadmium', 'copper', 'zinc', 'om']].values\n```\n:::\n\n\n## Develop a Spatially-Lagged Machine Learning Model\n\nAs well as using the ExtraTreeRegressor model which was also used in example 1, here we will use the custom `pyspatialml.estimators.SpatialLagRegressor` metalearner class to wrap the extratrees regressor into a model that adds a new feature based on the distance-weighted mean of spatially-proximal observations:\n\n::: {#e50ffc54 .cell execution_count=7}\n``` {.python .cell-code}\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.ensemble import ExtraTreesRegressor\nfrom sklearn.preprocessing import OneHotEncoder\nfrom sklearn.compose import ColumnTransformer\nfrom pyspatialml.transformers import KNNTransformer\nfrom sklearn.model_selection import cross_validate, KFold\nfrom sklearn.model_selection import GridSearchCV\n\n# define regressor\net = ExtraTreesRegressor(n_estimators=500, n_jobs=-1, random_state=1234)\n\nsoil_index = 
list(stack.names).index(\"soil\")\nxy_indexes = [list(stack.names).index(i) for i in [\"x_coordinates\", \"y_coordinates\"]]\n\npreproc = ColumnTransformer([\n ('ohe', OneHotEncoder(categories='auto', handle_unknown='ignore'), [soil_index]),\n ('lags', KNNTransformer(weights='distance', measure=\"mean\"), xy_indexes)\n], remainder='passthrough')\n\nwflow = Pipeline([\n ('preproc', preproc),\n ('regressor', et)\n])\n\nsearch_grid = {\"preproc__lags__n_neighbors\": [3, 5, 7, 9]}\ninner = KFold(n_splits=3, shuffle=True, random_state=1234)\nmodel = GridSearchCV(wflow, param_grid=search_grid, cv=inner, scoring=\"r2\")\n```\n:::\n\n\nFit the model and cross-validate:\n\n::: {#5d0230a7 .cell execution_count=8}\n``` {.python .cell-code}\nmodel = model.fit(X, y)\nmodel.best_params_\n```\n\n::: {.cell-output .cell-output-display execution_count=8}\n```\n{'preproc__lags__n_neighbors': 9}\n```\n:::\n:::\n\n\n::: {#4a619dd2 .cell execution_count=9}\n``` {.python .cell-code}\nouter = KFold(n_splits=10, shuffle=True, random_state=1234)\n\nscores = cross_validate(model, X, y, scoring='neg_mean_squared_error', cv=outer, n_jobs=1)\nrmse = np.sqrt(-scores['test_score']).mean()\n\nprint(\"Our RMSE score is {}\".format(rmse))\n```\n\n::: {.cell-output .cell-output-stdout}\n```\nOur RMSE score is 102.27495341202624\n```\n:::\n:::\n\n\nComparing the RMSE score the the score obtained in example 1, where the spatial structure of the training data was accounted for indirectly by added a variety of raster distance measures, we can see that the RMSE score is slightly improved.\n\n## Multi-Target Predictions\n\n::: {#ddc87c2a .cell execution_count=10}\n``` {.python .cell-code}\npreds = stack.predict(model)\npreds.rename(\n {old: new for old, new in zip(preds.names, ['lead', 'cadmium', 'copper', 'zinc', 'om'])},\n in_place=True\n)\npreds.lead.cmap = 'rainbow'\npreds.cadmium.cmap = 'rainbow'\npreds.copper.cmap = 'rainbow'\npreds.zinc.cmap = 'rainbow'\npreds.om.cmap = 'rainbow'\n```\n:::\n\n\n::: 
{#911ae7fb .cell execution_count=11}\n``` {.python .cell-code}\npreds.plot(out_shape=(200, 200), title_fontsize=14, figsize=(10, 8))\nplt.show()\n```\n\n::: {.cell-output .cell-output-display}\n![](spatial-features_files/figure-html/cell-12-output-1.png){width=822 height=634}\n:::\n:::\n\n\n", + "supporting": [ + "spatial-features_files" + ], + "filters": [], + "includes": {} + } } \ No newline at end of file diff --git a/_freeze/docs/transformers/execute-results/html.json b/_freeze/docs/transformers/execute-results/html.json index be08a23..7558378 100644 --- a/_freeze/docs/transformers/execute-results/html.json +++ b/_freeze/docs/transformers/execute-results/html.json @@ -1,12 +1,12 @@ -{ - "hash": "f438721f18ba534a03eaea64365fd148", - "result": { - "engine": "jupyter", - "markdown": "---\ntitle: Transformers\nformat:\n html:\n code-fold: false\n toc: true\n---\n\nThe transformers module contains classes that are used for spatial feature engineering.\n\n## Spatial Lag Transformer\n\nA transformer to create spatial lag variables by using a\nweighted mean/mode of the values of the K-neighboring observations. The\nweighted mean/mode of the surrounding observations are appended as a new\nfeature to the right-most column in the training data. The `measure` parameter\nshould be set to 'mode' for classification, and 'mean' for regression.\n\n```\nKNNTransformer(\n n_neighbors=7,\n weights=\"distance\",\n measure=\"mean\",\n radius=1.0,\n algorithm=\"auto\",\n leaf_size=30,\n metric=\"minkowski\",\n p=2,\n normalize=True,\n metric_params=None,\n kernel_params=None,\n n_jobs=1\n)\n```\n\n## GeoDistTransformer\n\nA common spatial feature engineering task is to create new features that\ndescribe the proximity to some reference locations. 
The GeoDistTransformer\ncan be used to add these features as part of a machine learning pipeline.\n\n```\nGeoDistTransformer(refs, log=False)\n```\n\nWhere `refs` are an array of coordinates of reference locations in\n(m, n-dimensional) order, such as\n{n_locations, x_coordinates, y_coordinates, ...} for as many dimensions as\nrequired. For example to calculate distances to a single x,y,z location:\n\n```\nrefs = [-57.345, -110.134, 1012]\n```\n\nAnd to calculate distances to three x,y reference locations:\n\n```\nrefs = [\n [-57.345, -110.134],\n [-56.345, -109.123],\n [-58.534, -112.123]\n]\n```\n\nThe supplied array has to have at least x,y coordinates with a\n(1, 2) shape for a single location.\n\n", - "supporting": [ - "transformers_files" - ], - "filters": [], - "includes": {} - } +{ + "hash": "f438721f18ba534a03eaea64365fd148", + "result": { + "engine": "jupyter", + "markdown": "---\ntitle: Transformers\nformat:\n html:\n code-fold: false\n toc: true\n---\n\nThe transformers module contains classes that are used for spatial feature engineering.\n\n## Spatial Lag Transformer\n\nA transformer to create spatial lag variables by using a\nweighted mean/mode of the values of the K-neighboring observations. The\nweighted mean/mode of the surrounding observations are appended as a new\nfeature to the right-most column in the training data. The `measure` parameter\nshould be set to 'mode' for classification, and 'mean' for regression.\n\n```\nKNNTransformer(\n n_neighbors=7,\n weights=\"distance\",\n measure=\"mean\",\n radius=1.0,\n algorithm=\"auto\",\n leaf_size=30,\n metric=\"minkowski\",\n p=2,\n normalize=True,\n metric_params=None,\n kernel_params=None,\n n_jobs=1\n)\n```\n\n## GeoDistTransformer\n\nA common spatial feature engineering task is to create new features that\ndescribe the proximity to some reference locations. 
The GeoDistTransformer\ncan be used to add these features as part of a machine learning pipeline.\n\n```\nGeoDistTransformer(refs, log=False)\n```\n\nWhere `refs` are an array of coordinates of reference locations in\n(m, n-dimensional) order, such as\n{n_locations, x_coordinates, y_coordinates, ...} for as many dimensions as\nrequired. For example to calculate distances to a single x,y,z location:\n\n```\nrefs = [-57.345, -110.134, 1012]\n```\n\nAnd to calculate distances to three x,y reference locations:\n\n```\nrefs = [\n [-57.345, -110.134],\n [-56.345, -109.123],\n [-58.534, -112.123]\n]\n```\n\nThe supplied array has to have at least x,y coordinates with a\n(1, 2) shape for a single location.\n\n", + "supporting": [ + "transformers_files" + ], + "filters": [], + "includes": {} + } } \ No newline at end of file diff --git a/_freeze/site_libs/clipboard/clipboard.min.js b/_freeze/site_libs/clipboard/clipboard.min.js index 1103f81..9f97edb 100644 --- a/_freeze/site_libs/clipboard/clipboard.min.js +++ b/_freeze/site_libs/clipboard/clipboard.min.js @@ -1,7 +1,7 @@ -/*! - * clipboard.js v2.0.11 - * https://clipboardjs.com/ - * - * Licensed MIT © Zeno Rocha - */ +/*! 
+ * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ !function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return b}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),r=n.n(e);function c(t){try{return document.execCommand(t)}catch(t){return}}var a=function(t){t=r()(t);return c("cut"),t};function o(t,e){var n,o,t=(n=t,o="rtl"===document.documentElement.getAttribute("dir"),(t=document.createElement("textarea")).style.fontSize="12pt",t.style.border="0",t.style.padding="0",t.style.margin="0",t.style.position="absolute",t.style[o?"right":"left"]="-9999px",o=window.pageYOffset||document.documentElement.scrollTop,t.style.top="".concat(o,"px"),t.setAttribute("readonly",""),t.value=n,t);return e.container.appendChild(t),e=r()(t),c("copy"),t.remove(),e}var f=function(t){var e=1 - - - + + + + diff --git a/docs/quickstart.qmd b/docs/guide.qmd similarity index 96% rename from docs/quickstart.qmd rename to docs/guide.qmd index ebaa814..e8e2b1d 100644 --- a/docs/quickstart.qmd +++ b/docs/guide.qmd @@ -1,169 +1,169 @@ ---- -title: "Quick start" -format: - html: - code-fold: false - toc: true -jupyter: python3 ---- - -## Initiating a Raster Object - -We are going to use a set of Landsat 7 bands contained within the nc example -data: - -```{python} -from pyspatialml import Raster -import pyspatialml.datasets.nc as nc -import matplotlib.pyplot as plt - -predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] -``` - -These raster datasets are aligned in terms of their extent and coordinate -reference systems. 
We can 'stack' these into a Raster class so that we can -perform machine learning related operations on the set of rasters: - -```{python} -stack = Raster(predictors) -``` - -When a Raster object is created, the names to each layer are automatically -created based on syntactically-correct versions of the file basenames: - -```{python} -stack.names -``` - -Color ramps and matplotlib.colors.Normalize objects can be assigned to each -RasterLayer in the object using the `cmap` and `norm` attributes for -convenient in plotting: - -```{python} -stack.lsat7_2000_10.cmap = "Blues" -stack.lsat7_2000_20.cmap = "Greens" -stack.lsat7_2000_30.cmap = "Reds" -stack.lsat7_2000_40.cmap = "RdPu" -stack.lsat7_2000_50.cmap = "autumn" -stack.lsat7_2000_70.cmap = "hot" - -stack.plot( - title_fontsize=8, - label_fontsize=6, - legend_fontsize=6, - names=["B1", "B2", "B3", "B4", "B5", "B7"], - fig_kwds={"figsize": (8, 4)}, - subplots_kwds={"wspace": 0.3} -) -plt.show() -``` - -## Subsetting and Indexing - -Indexing of Raster objects is provided by several methods: - -The ``Raster[keys]`` method enables key-based indexing using a name of a -RasterLayer, or a list of names. Direct subsetting of a Raster object instance -returns a RasterLayer if only a single label is used, otherwise it always -returns a new Raster object containing only the selected layers. - -The ``Raster.iloc[int, list, tuple, slice]`` method allows a Raster object -instance to be subset using integer-based indexing or slicing. The ``iloc`` -method returns a RasterLayer object if only a single index is used, otherwise -it always returns a new Raster object containing only the selected layers. - -Subsetting of a Raster object instance can also occur by using attribute names -in the form of ``Raster.name_of_layer``. Because only a single RasterLayer can -be subset at one time using this approach, a RasterLayer object is always -returned. 
- -Examples of methods to subset a Raster object: - -```{python} -# subset based on position -single_layer = stack.iloc[0] - -# subset using a slice -new_raster_obj = stack.iloc[0:3] - -# subset using labels -single_layer = stack['lsat7_2000_10'] -single_layer = stack.lsat7_2000_10 - -# list or tuple of keys -new_raster_obj = stack[('lsat7_2000_10', 'lsat7_2000_20')] -``` - -Iterate through RasterLayers individually: - -```{python} -for name, layer in stack.items(): - print(name, layer) -``` - -Replace a RasterLayer with another: - -```{python} -stack.iloc[0] = Raster(nc.band7).iloc[0] - -stack.iloc[0].plot() -plt.show() -``` - -## Appending and Dropping Layers - -Append layers from another Raster to the stack. Duplicate names are -automatically given a suffix. - -```{python} -stack.append(Raster(nc.band7), in_place=True) -stack.names -``` - -Rename RasterLayers using a dict of old_name : new_name pairs: - -```{python} -stack.names -stack.rename({'lsat7_2000_30': 'new_name'}, in_place=True) -stack.names -stack.new_name -stack['new_name'] -``` - -Drop a RasterLayer: - -```{python} -stack.names -stack.drop(labels='lsat7_2000_70_1', in_place=True) -stack.names -``` - -## Integration with Pandas - -Data from a Raster object can converted into a `Pandas.DataDrame`, with each -pixel representing by a row, and columns reflecting the x, y coordinates and -the values of each RasterLayer in the Raster object: - -```{python} -import pandas as pd - -df = stack.to_pandas(max_pixels=50000, resampling='nearest') -df.head() -``` - -The original raster is up-sampled based on max_pixels and the resampling -method, which uses all of resampling methods available in the underlying -rasterio library for decimated reads. 
- -## Saving a Raster to File - -Save a Raster: - -```{python} -import tempfile - -tmp_tif = tempfile.NamedTemporaryFile().name + '.tif' -newstack = stack.write(file_path=tmp_tif, nodata=-9999) -newstack.new_name.read() -newstack = None -``` +--- +title: "Quick start" +format: + html: + code-fold: false + toc: true +jupyter: python3 +--- + +## Initiating a Raster Object + +We are going to use a set of Landsat 7 bands contained within the nc example +data: + +```{python} +from pyspatialml import Raster +import pyspatialml.datasets.nc as nc +import matplotlib.pyplot as plt + +predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] +``` + +These raster datasets are aligned in terms of their extent and coordinate +reference systems. We can 'stack' these into a Raster class so that we can +perform machine learning related operations on the set of rasters: + +```{python} +stack = Raster(predictors) +``` + +When a Raster object is created, the names to each layer are automatically +created based on syntactically-correct versions of the file basenames: + +```{python} +stack.names +``` + +Color ramps and matplotlib.colors.Normalize objects can be assigned to each +RasterLayer in the object using the `cmap` and `norm` attributes for +convenient in plotting: + +```{python} +stack.lsat7_2000_10.cmap = "Blues" +stack.lsat7_2000_20.cmap = "Greens" +stack.lsat7_2000_30.cmap = "Reds" +stack.lsat7_2000_40.cmap = "RdPu" +stack.lsat7_2000_50.cmap = "autumn" +stack.lsat7_2000_70.cmap = "hot" + +stack.plot( + title_fontsize=8, + label_fontsize=6, + legend_fontsize=6, + names=["B1", "B2", "B3", "B4", "B5", "B7"], + fig_kwds={"figsize": (8, 4)}, + subplots_kwds={"wspace": 0.3} +) +plt.show() +``` + +## Subsetting and Indexing + +Indexing of Raster objects is provided by several methods: + +The ``Raster[keys]`` method enables key-based indexing using a name of a +RasterLayer, or a list of names. 
Direct subsetting of a Raster object instance +returns a RasterLayer if only a single label is used, otherwise it always +returns a new Raster object containing only the selected layers. + +The ``Raster.iloc[int, list, tuple, slice]`` method allows a Raster object +instance to be subset using integer-based indexing or slicing. The ``iloc`` +method returns a RasterLayer object if only a single index is used, otherwise +it always returns a new Raster object containing only the selected layers. + +Subsetting of a Raster object instance can also occur by using attribute names +in the form of ``Raster.name_of_layer``. Because only a single RasterLayer can +be subset at one time using this approach, a RasterLayer object is always +returned. + +Examples of methods to subset a Raster object: + +```{python} +# subset based on position +single_layer = stack.iloc[0] + +# subset using a slice +new_raster_obj = stack.iloc[0:3] + +# subset using labels +single_layer = stack['lsat7_2000_10'] +single_layer = stack.lsat7_2000_10 + +# list or tuple of keys +new_raster_obj = stack[('lsat7_2000_10', 'lsat7_2000_20')] +``` + +Iterate through RasterLayers individually: + +```{python} +for name, layer in stack.items(): + print(name, layer) +``` + +Replace a RasterLayer with another: + +```{python} +stack.iloc[0] = Raster(nc.band7).iloc[0] + +stack.iloc[0].plot() +plt.show() +``` + +## Appending and Dropping Layers + +Append layers from another Raster to the stack. Duplicate names are +automatically given a suffix. 
+ +```{python} +stack.append(Raster(nc.band7), in_place=True) +stack.names +``` + +Rename RasterLayers using a dict of old_name : new_name pairs: + +```{python} +stack.names +stack.rename({'lsat7_2000_30': 'new_name'}, in_place=True) +stack.names +stack.new_name +stack['new_name'] +``` + +Drop a RasterLayer: + +```{python} +stack.names +stack.drop(labels='lsat7_2000_70_1', in_place=True) +stack.names +``` + +## Integration with Pandas + +Data from a Raster object can converted into a `Pandas.DataDrame`, with each +pixel representing by a row, and columns reflecting the x, y coordinates and +the values of each RasterLayer in the Raster object: + +```{python} +import pandas as pd + +df = stack.to_pandas(max_pixels=50000, resampling='nearest') +df.head() +``` + +The original raster is up-sampled based on max_pixels and the resampling +method, which uses all of resampling methods available in the underlying +rasterio library for decimated reads. + +## Saving a Raster to File + +Save a Raster: + +```{python} +import tempfile + +tmp_tif = tempfile.NamedTemporaryFile().name + '.tif' +newstack = stack.write(file_path=tmp_tif, nodata=-9999) +newstack.new_name.read() +newstack = None +``` diff --git a/docs/installation.qmd b/docs/installation.qmd index a465437..70fdc72 100644 --- a/docs/installation.qmd +++ b/docs/installation.qmd @@ -1,24 +1,24 @@ ---- -title: "Installation" -format: - html: - code-fold: false -jupyter: python3 ---- - -Pyspatialml is available on PyPI and can be installed in the usual manner with: - -```{python} -#| eval: false -pip install Pyspatialml -``` - -The development version, which is more up-to-date with changes to the package -especially during these earlier stages of development, can be installed -directly via: - -```{python} -#| eval: false -pip install git+https://github.com/stevenpawley/Pyspatialml -``` - +--- +title: "Installation" +format: + html: + code-fold: false +jupyter: python3 +--- + +Pyspatialml is available on PyPI and can be 
installed in the usual manner with: + +```{python} +#| eval: false +pip install Pyspatialml +``` + +The development version, which is more up-to-date with changes to the package +especially during these earlier stages of development, can be installed +directly via: + +```{python} +#| eval: false +pip install git+https://github.com/stevenpawley/Pyspatialml +``` + diff --git a/docs/landcover.qmd b/docs/landcover.qmd index 7d55b02..d9d9b80 100644 --- a/docs/landcover.qmd +++ b/docs/landcover.qmd @@ -1,210 +1,210 @@ ---- -title: "Landcover classification" -format: - html: - code-fold: false -jupyter: python3 ---- - -Landcover classification is a common task in remote sensing. This example -demonstrates how to extract training data from a raster and vector data, train -a classifier, and predict landcover classes on a raster. - -## Data - -The data used in this example is from the Landsat 7 ETM+ sensor, and represents -an extract of data derived from the GRASS GIS North Carolina example dataset. -The data consists of 6 bands (1, 2, 3, 4, 5, 7) and labelled pixels. The labelled -pixels are used as training data for the classifier. The data is stored in the -`pyspatialml.datasets` module. - -## Extraction Training Data - -Load some training data in the form of polygons, points and labelled pixels in -``geopandas.GeoDataFrame`` objects. We will also generate some line geometries -by converting the polygon boundaries into linestrings. All of these geometry -types can be used to spatially query pixel values in a Raster object, however -each GeoDataFrame must contain only one type of geometry (i.e. either shapely -points, polygons or linestrings). 
- -```{python} -from pyspatialml import Raster -from pyspatialml.datasets import nc -from copy import deepcopy -import os -import numpy as np -import tempfile -import geopandas -import rasterio.plot -import matplotlib.pyplot as plt - -training_py = geopandas.read_file(nc.polygons) -training_pt = geopandas.read_file(nc.points) -training_px = rasterio.open(nc.labelled_pixels) -training_lines = deepcopy(training_py) -training_lines['geometry'] = training_lines.geometry.boundary -``` - -Show training data points and a single raster band using numpy and matplotlib: - -```{python} -predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] -stack = Raster(predictors) - -fig, ax = plt.subplots(figsize=(9, 9)) -stack.lsat7_2000_70.plot(ax=ax) - -training_py.plot(column="label", ax=ax, legend=True) -plt.show() -``` - -Pixel values in the Raster object can be spatially queried using the -`extract_vector` and `extract_raster` methods. In addition, the -`extract_xy` method can be used to query pixel values using a 2d array of x -and y coordinates. - -The `extract_vector` method accepts a Geopandas GeoDataFrame as the -`gdf` argument. For GeoDataFrames containing shapely point geometries, the -closest pixel to each point is sampled. For shapely polygon geometries, all -pixels whose centres are inside the polygon are sampled. For shapely -linestring geometries, every pixel touched by the line is sampled. For all -geometry types, pixel values are queries for each geometry separately. This -means that overlapping polygons or points that fall within the same pixel with -cause the same pixel to be sampled multiple times. - -By default, the extract functions return a Geopandas GeoDataFrame of point -geometries and the DataFrame containing the extracted pixels, with the column -names set by the names of the raster datasets in the Raster object. 
The user -can also use the `return_array=True` argument, which instead of returning a -DataFrame will return three masked numpy arrays (ids, X, xy) containing the -geodataframe index positions, extracted pixel values, and the spatial -coordinates of the sampled pixels. These arrays are masked arrays. - -The `extract_raster` method can also be used to spatially query pixel values -from a Raster object using another raster containing labelled pixels. This -raster has to be spatially aligned with the Raster object. The values of the -labelled pixels are returned along with the queried pixel values. - -```{python} -# Extract data from rasters at the training point locations: -df_points = stack.extract_vector(training_pt) -df_polygons = stack.extract_vector(training_py) -df_lines = stack.extract_vector(training_lines) -``` - -For any vector features, a GeoDataFrame is returned containing the extracted -pixel values. A pandas.MultiIndex is used to relate the pixels back to the -original geometries, with the `pixel_idx` index referring to the index of each -pixel, and the `geometry_idx` referring to the index of the original geometry -in the supplied GeoDataFrame. The pixel values themselves are represented as -`shapely.geometry.Point` objects. These will need to be joined back with the -columns of the vector features to get the labelled classes. 
Here we will join -the extracted pixels using the "id" column and the GeoDataFrame index of the -vector features: - -```{python} -# Join the extracted values with other columns from the training data -df_points["id"] = training_pt["id"].values -df_points = df_points.dropna() -df_points.head() - -df_polygons = df_polygons.merge( - right=training_py.loc[:, ["label", "id"]], - left_on="geometry_idx", - right_on="index", - right_index=True -) -``` - -If the training data is from labelled pixels in a raster, then the extracted -data will contain a "value" column that contains the pixel labels: - -```{python} -df_raster = stack.extract_raster(training_px) -``` - -## Model Training - -Next we can train a logistic regression classifier: - -```{python} -from sklearn.linear_model import LogisticRegressionCV -from sklearn.preprocessing import StandardScaler -from sklearn.pipeline import Pipeline -from sklearn.model_selection import cross_validate - -# define the classifier with standardization of the input features in a -# pipeline -lr = Pipeline( - [('scaling', StandardScaler()), - ('classifier', LogisticRegressionCV(n_jobs=-1))]) - -# remove NaNs from training data -df_polygons = df_polygons.dropna() - -# fit the classifier -X = df_polygons.drop(columns=["id", "label", "geometry"]).values -y = df_polygons["id"].values -lr.fit(X, y) -``` - -After defining a classifier, a typical step consists of performing a -cross-validation to evaluate the performance of the model. Scikit-learn -provides the cross_validate function for this purpose. In comparison to -non-spatial data, spatial data can be spatially correlated, which potentially -can mean that geographically proximal samples may not represent truely -independent samples if they are within the autocorrelation range of some of the -predictors. 
This will lead to overly optimistic performance measures if samples -in the training dataset / cross-validation partition are strongly spatially -correlated with samples in the test dataset / cross-validation partition. - -In this case, performing cross-validation using groups is useful, because these -groups can represent spatial clusters of training samples, and samples from the -same group will never occur in both the training and test partitions of a -cross-validation. Here we can use the polygon indices as the groups, i.e. -pixels within the same polygon will not be split into training and test -partitions: - -```{python} -scores = cross_validate( - estimator=lr, - X=X, - y=y, - groups=df_polygons.index.droplevel("pixel_idx"), - scoring="accuracy", - cv=3, - n_jobs=1, -) -np.round(scores['test_score'].mean(), 2) -``` - -## Raster Prediction - -Prediction on the Raster object is performed using the `predict` method. -The `estimator` is the only required argument. If the `file_path` argument -is not specified then the result is automatically written to a temporary file. -The predict method returns an rasterio.io.DatasetReader object which is open. - -```{python} -# prediction -result = stack.predict(estimator=lr, dtype='int16', nodata=0) -result_probs = stack.predict_proba(estimator=lr) - -# plot classification result -result.iloc[0].cmap = "Dark2" -result.iloc[0].categorical = True - -result.plot() -plt.show() -``` - -The `predict_proba` method can be used to output class probabilities as -a multi-band raster (a band for each class probability). In the latter case, -`indexes` can also be supplied if you only want to output the probabilities -for a particular class, or list of classes, by supplying the indices of those -classes: - -```{python} -result_probs.plot() -plt.show() -``` +--- +title: "Landcover classification" +format: + html: + code-fold: false +jupyter: python3 +--- + +Landcover classification is a common task in remote sensing. 
This example +demonstrates how to extract training data from a raster and vector data, train +a classifier, and predict landcover classes on a raster. + +## Data + +The data used in this example is from the Landsat 7 ETM+ sensor, and represents +an extract of data derived from the GRASS GIS North Carolina example dataset. +The data consists of 6 bands (1, 2, 3, 4, 5, 7) and labelled pixels. The labelled +pixels are used as training data for the classifier. The data is stored in the +`pyspatialml.datasets` module. + +## Extraction Training Data + +Load some training data in the form of polygons, points and labelled pixels in +``geopandas.GeoDataFrame`` objects. We will also generate some line geometries +by converting the polygon boundaries into linestrings. All of these geometry +types can be used to spatially query pixel values in a Raster object, however +each GeoDataFrame must contain only one type of geometry (i.e. either shapely +points, polygons or linestrings). + +```{python} +from pyspatialml import Raster +from pyspatialml.datasets import nc +from copy import deepcopy +import os +import numpy as np +import tempfile +import geopandas +import rasterio.plot +import matplotlib.pyplot as plt + +training_py = geopandas.read_file(nc.polygons) +training_pt = geopandas.read_file(nc.points) +training_px = rasterio.open(nc.labelled_pixels) +training_lines = deepcopy(training_py) +training_lines['geometry'] = training_lines.geometry.boundary +``` + +Show training data points and a single raster band using numpy and matplotlib: + +```{python} +predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] +stack = Raster(predictors) + +fig, ax = plt.subplots(figsize=(9, 9)) +stack.lsat7_2000_70.plot(ax=ax) + +training_py.plot(column="label", ax=ax, legend=True) +plt.show() +``` + +Pixel values in the Raster object can be spatially queried using the +`extract_vector` and `extract_raster` methods. 
In addition, the +`extract_xy` method can be used to query pixel values using a 2d array of x +and y coordinates. + +The `extract_vector` method accepts a Geopandas GeoDataFrame as the +`gdf` argument. For GeoDataFrames containing shapely point geometries, the +closest pixel to each point is sampled. For shapely polygon geometries, all +pixels whose centres are inside the polygon are sampled. For shapely +linestring geometries, every pixel touched by the line is sampled. For all +geometry types, pixel values are queries for each geometry separately. This +means that overlapping polygons or points that fall within the same pixel with +cause the same pixel to be sampled multiple times. + +By default, the extract functions return a Geopandas GeoDataFrame of point +geometries and the DataFrame containing the extracted pixels, with the column +names set by the names of the raster datasets in the Raster object. The user +can also use the `return_array=True` argument, which instead of returning a +DataFrame will return three masked numpy arrays (ids, X, xy) containing the +geodataframe index positions, extracted pixel values, and the spatial +coordinates of the sampled pixels. These arrays are masked arrays. + +The `extract_raster` method can also be used to spatially query pixel values +from a Raster object using another raster containing labelled pixels. This +raster has to be spatially aligned with the Raster object. The values of the +labelled pixels are returned along with the queried pixel values. + +```{python} +# Extract data from rasters at the training point locations: +df_points = stack.extract_vector(training_pt) +df_polygons = stack.extract_vector(training_py) +df_lines = stack.extract_vector(training_lines) +``` + +For any vector features, a GeoDataFrame is returned containing the extracted +pixel values. 
A pandas.MultiIndex is used to relate the pixels back to the +original geometries, with the `pixel_idx` index referring to the index of each +pixel, and the `geometry_idx` referring to the index of the original geometry +in the supplied GeoDataFrame. The pixel values themselves are represented as +`shapely.geometry.Point` objects. These will need to be joined back with the +columns of the vector features to get the labelled classes. Here we will join +the extracted pixels using the "id" column and the GeoDataFrame index of the +vector features: + +```{python} +# Join the extracted values with other columns from the training data +df_points["id"] = training_pt["id"].values +df_points = df_points.dropna() +df_points.head() + +df_polygons = df_polygons.merge( + right=training_py.loc[:, ["label", "id"]], + left_on="geometry_idx", + right_on="index", + right_index=True +) +``` + +If the training data is from labelled pixels in a raster, then the extracted +data will contain a "value" column that contains the pixel labels: + +```{python} +df_raster = stack.extract_raster(training_px) +``` + +## Model Training + +Next we can train a logistic regression classifier: + +```{python} +from sklearn.linear_model import LogisticRegressionCV +from sklearn.preprocessing import StandardScaler +from sklearn.pipeline import Pipeline +from sklearn.model_selection import cross_validate + +# define the classifier with standardization of the input features in a +# pipeline +lr = Pipeline( + [('scaling', StandardScaler()), + ('classifier', LogisticRegressionCV(n_jobs=-1))]) + +# remove NaNs from training data +df_polygons = df_polygons.dropna() + +# fit the classifier +X = df_polygons.drop(columns=["id", "label", "geometry"]).values +y = df_polygons["id"].values +lr.fit(X, y) +``` + +After defining a classifier, a typical step consists of performing a +cross-validation to evaluate the performance of the model. Scikit-learn +provides the cross_validate function for this purpose. 
In comparison to +non-spatial data, spatial data can be spatially correlated, which potentially +can mean that geographically proximal samples may not represent truely +independent samples if they are within the autocorrelation range of some of the +predictors. This will lead to overly optimistic performance measures if samples +in the training dataset / cross-validation partition are strongly spatially +correlated with samples in the test dataset / cross-validation partition. + +In this case, performing cross-validation using groups is useful, because these +groups can represent spatial clusters of training samples, and samples from the +same group will never occur in both the training and test partitions of a +cross-validation. Here we can use the polygon indices as the groups, i.e. +pixels within the same polygon will not be split into training and test +partitions: + +```{python} +scores = cross_validate( + estimator=lr, + X=X, + y=y, + groups=df_polygons.index.droplevel("pixel_idx"), + scoring="accuracy", + cv=3, + n_jobs=1, +) +np.round(scores['test_score'].mean(), 2) +``` + +## Raster Prediction + +Prediction on the Raster object is performed using the `predict` method. +The `estimator` is the only required argument. If the `file_path` argument +is not specified then the result is automatically written to a temporary file. +The predict method returns an rasterio.io.DatasetReader object which is open. + +```{python} +# prediction +result = stack.predict(estimator=lr, dtype='int16', nodata=0) +result_probs = stack.predict_proba(estimator=lr) + +# plot classification result +result.iloc[0].cmap = "Dark2" +result.iloc[0].categorical = True + +result.plot() +plt.show() +``` + +The `predict_proba` method can be used to output class probabilities as +a multi-band raster (a band for each class probability). 
In the latter case, +`indexes` can also be supplied if you only want to output the probabilities +for a particular class, or list of classes, by supplying the indices of those +classes: + +```{python} +result_probs.plot() +plt.show() +``` diff --git a/docs/multitarget-regression-soil-properties.qmd b/docs/multitarget-regression-soil-properties.qmd index ce47f2f..16b39dd 100644 --- a/docs/multitarget-regression-soil-properties.qmd +++ b/docs/multitarget-regression-soil-properties.qmd @@ -1,284 +1,284 @@ ---- -title: "Multi-Target Spatial Prediction using the Meuse Dataset" -format: - html: - code-fold: false -jupyter: python3 ---- - -Here we are using the meuse dataset which is included in the pyspatialml package as an example of performing a spatial model and prediction. We can access the datasets using the `pyspatialml.datasets` module: - -```{python} -from copy import deepcopy -from tempfile import NamedTemporaryFile -import geopandas as gpd -import numpy as np -from pyspatialml import Raster -from pyspatialml.preprocessing import xy_coordinates, distance_to_corners -import pyspatialml.datasets.meuse as ms - -import matplotlib as mpl -import matplotlib.pyplot as plt -from matplotlib import cm -``` - -```{python} -predictor_files = ms.predictors -training_pts_file = ms.meuse -``` - -```{python} -stack = Raster(predictor_files) -stack.names -``` - -Pyspatialml implements pandas-style indexing for `Raster` objects, using `Raster.loc` to index by the name of the raster, and `Raster.iloc` to select by index. This method also accepts slices. Label-based indexing is also provided directly by the __getattr_ magic method, i.e. `Raster[name]` or for multiple layers `Raster[(names)]`. 
- -For example we can remove layers from Raster object using the `Raster.drop` method, or by subsetting the raster: - -```{python} -stack.drop('ffreq') -``` - -We can store matplotlib cmaps as an attribute within each layer in the Raster: - -```{python} -stack.chnl_dist.cmap = 'RdBu' -stack.dem.cmap = 'terrain' -stack.dist.cmap = 'Reds' -stack.landimg2.cmap = 'Greys' -stack.landimg3.cmap = 'Greys' -stack.landimg4.cmap = 'Greys' -stack.landimg4.cmap = 'Greys' -stack.mrvbf.cmap = 'jet' -stack.rsp.cmap = 'gnuplot2' -stack.slope.cmap = 'PuRd' -stack.soil.cmap = 'Set2' -stack.twi.cmap = 'coolwarm' -``` - -Plot the predictors in the Raster object as a raster matrix: - -```{python} -mpl.style.use('seaborn-v0_8') -axs = stack.plot(figsize=(9, 7)) -ax = axs.flatten()[10] -im = ax.images -im[0].colorbar.set_ticks([1,2,3]) -ax = axs.flatten()[8] -ax.tick_params(axis='x', labelrotation=65) - -plt.tight_layout() -plt.show() -``` - -## Feature Engineering - -We want the prediction results to be dependent on the spatial locations of the training data. 
So to include spatial information, coordinate grids can be generated and added to the Raster object: - -```{python} -xy_layer = xy_coordinates( - layer=stack.iloc[0], - file_path=NamedTemporaryFile(suffix=".tif").name -) -``` - -```{python} -xy_layer = xy_coordinates( - layer=stack.iloc[0], - file_path=NamedTemporaryFile(suffix=".tif").name -) - -edms = distance_to_corners( - layer=stack.iloc[0], - file_path=NamedTemporaryFile(suffix=".tif").name -) -edms.rename( - {old: new for (old, new) in zip(edms.names, ["tl", "tr", "bl", "br", "c"])}, - in_place=True -) - -edms.plot() -plt.show() -``` - -Append them to the Raster object: - -```{python} -stack = stack.append([xy_layer, edms]) -``` - -Plot the new predictors: - -```{python} -axs = stack.plot(figsize=(9, 7)) -ax = axs.flatten()[10] -im = ax.images -im[0].colorbar.set_ticks([1,2,3]) -ax = axs.flatten()[8] -ax.tick_params(axis='x', labelrotation=65) - -plt.tight_layout() -plt.show() -``` - -The area that is filled by some of the grids is different. This doesn't matter for the prediction because pixels in the Raster object that include some NaNs in some of the layers will be removed. However, the plots could potentially be given a cleaner look. 
We can use the Raster.intersect method to fix this: - -```{python} -stack = stack.intersect() -``` - -```{python} -axs = stack.plot(figsize=(9, 7)) -ax = axs.flatten()[10] -im = ax.images -im[0].colorbar.set_ticks([1,2,3]) -ax = axs.flatten()[8] -ax.tick_params(axis='x', labelrotation=65) - -plt.tight_layout() -plt.show() -``` - -## Read the Meuse Dataset - -```{python} -training_pts = gpd.read_file(training_pts_file) -training_pts.head() -``` - -Plot the training points: - -```{python} -from mpl_toolkits.axes_grid1 import make_axes_locatable - -fig, axs = plt.subplots(2, 3, figsize=(8.5, 7)) - -for i, (ax, target) in enumerate(zip(axs.ravel(), ['cadmium', 'copper', 'lead', 'zinc', 'om'])): - ax.set_title(target.title()) - divider = make_axes_locatable(ax) - cax = divider.append_axes("right", size="10%", pad=0.05) - training_pts.plot(column=target, legend=True, ax=ax, cax=cax, cmap='viridis') - - if i != 0: - ax.set_yticklabels([]) - - if i != 3: - ax.set_xticklabels([]) - else: - ax.tick_params(axis='x', labelrotation=65) - -fig.delaxes(axs.flatten()[i+1]) -plt.tight_layout() -plt.show() -``` - -## Extract Raster Values at the Training Point Locations - -Pixel values from a Raster object can be extracted using geometries within a geopandas.GeoDataFrame (points, lines, polygons) or by using labelled pixels from another raster with the same dimensions and crs. 
- -By default the extracted values are returned as a geopandas.GeoDataFrame that contains the data and the coordinates of the pixels: - -```{python} -training_df = stack.extract_vector(gdf=training_pts) - -training_df.index = training_df.index.get_level_values("geometry_idx") -training_df = training_df.merge( - training_pts.loc[:, ("lead", "cadmium", "copper", "zinc", "om")], - left_index=True, - right_index=True -) -``` - -```{python} -training_df = training_df.dropna() -training_df.head() -``` - -## Developing a Machine Learning Model - -Here we are going to create a machine learning pipeline that correctly handles categorical predictors via one-hot encoding: - -```{python} -stack.names -``` - -```{python} -from sklearn.pipeline import Pipeline -from sklearn.ensemble import ExtraTreesRegressor -from sklearn.preprocessing import OneHotEncoder -from sklearn.compose import ColumnTransformer - -soil_idx = [i for i, name in enumerate(stack.names) if name == 'soil'] - -trans = ColumnTransformer([ - ('ohe', OneHotEncoder(categories='auto', handle_unknown='ignore'), soil_idx) - ], remainder='passthrough') - -et = ExtraTreesRegressor(n_estimators=500, n_jobs=-1, random_state=1234) -et = Pipeline([ - ('preproc', trans), - ('regressor', et)]) -``` - -Now we can separate our response and predictor variables and train the model: - -```{python} -X = training_df.loc[:, stack.names] -y = training_df.loc[:, ['lead', 'cadmium', 'copper', 'zinc', 'om']] -et.fit(X, y) -``` - -To evaluate the performance of the model, we will use 10-fold cross validation: - -```{python} -from sklearn.model_selection import cross_validate, KFold - -outer = KFold(n_splits=10, shuffle=True, random_state=1234) -scores = cross_validate(et, X, y, scoring='neg_mean_squared_error', cv=10, n_jobs=1) -rmse = np.sqrt(-scores['test_score']).mean() - -print("Our RMSE score is {}".format(rmse)) -``` - -## Feature Importances - -```{python} -ohe_names = deepcopy(list(stack.names)) -ohe_names.insert(soil_idx[0], 
'soil1') -ohe_names.insert(soil_idx[0], 'soil2') -ohe_names = np.array(ohe_names) -``` - -```{python} -mpl.style.use('ggplot') - -fimp = et.named_steps['regressor'].feature_importances_ - -fig, ax = plt.subplots(figsize=(4, 6)) -ax.barh(y=ohe_names[fimp.argsort()], width=fimp[fimp.argsort()]) -ax.set_xlabel('Feature Importance Score') -plt.show() -``` - -## Prediction on the Raster object - -```{python} -preds = stack.predict(et) -preds.rename( - {old: new for old, new in zip(preds.names, ['lead', 'cadmium', 'copper', 'zinc', 'om'])}, - in_place=True -) -preds.lead.cmap = 'rainbow' -preds.cadmium.cmap = 'rainbow' -preds.copper.cmap = 'rainbow' -preds.zinc.cmap = 'rainbow' -preds.om.cmap = 'rainbow' -``` - -Plot the results: - -```{python} -preds.plot(out_shape=(200, 200), title_fontsize=14, figsize=(10, 8)) -plt.show() -``` +--- +title: "Multi-Target Spatial Prediction using the Meuse Dataset" +format: + html: + code-fold: false +jupyter: python3 +--- + +Here we are using the meuse dataset which is included in the pyspatialml package as an example of performing a spatial model and prediction. We can access the datasets using the `pyspatialml.datasets` module: + +```{python} +from copy import deepcopy +from tempfile import NamedTemporaryFile +import geopandas as gpd +import numpy as np +from pyspatialml import Raster +from pyspatialml.preprocessing import xy_coordinates, distance_to_corners +import pyspatialml.datasets.meuse as ms + +import matplotlib as mpl +import matplotlib.pyplot as plt +from matplotlib import cm +``` + +```{python} +predictor_files = ms.predictors +training_pts_file = ms.meuse +``` + +```{python} +stack = Raster(predictor_files) +stack.names +``` + +Pyspatialml implements pandas-style indexing for `Raster` objects, using `Raster.loc` to index by the name of the raster, and `Raster.iloc` to select by index. This method also accepts slices. Label-based indexing is also provided directly by the `__getattr__` magic method, i.e. 
`Raster[name]` or for multiple layers `Raster[(names)]`. + +For example we can remove layers from Raster object using the `Raster.drop` method, or by subsetting the raster: + +```{python} +stack.drop('ffreq') +``` + +We can store matplotlib cmaps as an attribute within each layer in the Raster: + +```{python} +stack.chnl_dist.cmap = 'RdBu' +stack.dem.cmap = 'terrain' +stack.dist.cmap = 'Reds' +stack.landimg2.cmap = 'Greys' +stack.landimg3.cmap = 'Greys' +stack.landimg4.cmap = 'Greys' +stack.landimg4.cmap = 'Greys' +stack.mrvbf.cmap = 'jet' +stack.rsp.cmap = 'gnuplot2' +stack.slope.cmap = 'PuRd' +stack.soil.cmap = 'Set2' +stack.twi.cmap = 'coolwarm' +``` + +Plot the predictors in the Raster object as a raster matrix: + +```{python} +mpl.style.use('seaborn-v0_8') +axs = stack.plot(figsize=(9, 7)) +ax = axs.flatten()[10] +im = ax.images +im[0].colorbar.set_ticks([1,2,3]) +ax = axs.flatten()[8] +ax.tick_params(axis='x', labelrotation=65) + +plt.tight_layout() +plt.show() +``` + +## Feature Engineering + +We want the prediction results to be dependent on the spatial locations of the training data. 
So to include spatial information, coordinate grids can be generated and added to the Raster object: + +```{python} +xy_layer = xy_coordinates( + layer=stack.iloc[0], + file_path=NamedTemporaryFile(suffix=".tif").name +) +``` + +```{python} +xy_layer = xy_coordinates( + layer=stack.iloc[0], + file_path=NamedTemporaryFile(suffix=".tif").name +) + +edms = distance_to_corners( + layer=stack.iloc[0], + file_path=NamedTemporaryFile(suffix=".tif").name +) +edms.rename( + {old: new for (old, new) in zip(edms.names, ["tl", "tr", "bl", "br", "c"])}, + in_place=True +) + +edms.plot() +plt.show() +``` + +Append them to the Raster object: + +```{python} +stack = stack.append([xy_layer, edms]) +``` + +Plot the new predictors: + +```{python} +axs = stack.plot(figsize=(9, 7)) +ax = axs.flatten()[10] +im = ax.images +im[0].colorbar.set_ticks([1,2,3]) +ax = axs.flatten()[8] +ax.tick_params(axis='x', labelrotation=65) + +plt.tight_layout() +plt.show() +``` + +The area that is filled by some of the grids is different. This doesn't matter for the prediction because pixels in the Raster object that include some NaNs in some of the layers will be removed. However, the plots could potentially be given a cleaner look. 
We can use the Raster.intersect method to fix this: + +```{python} +stack = stack.intersect() +``` + +```{python} +axs = stack.plot(figsize=(9, 7)) +ax = axs.flatten()[10] +im = ax.images +im[0].colorbar.set_ticks([1,2,3]) +ax = axs.flatten()[8] +ax.tick_params(axis='x', labelrotation=65) + +plt.tight_layout() +plt.show() +``` + +## Read the Meuse Dataset + +```{python} +training_pts = gpd.read_file(training_pts_file) +training_pts.head() +``` + +Plot the training points: + +```{python} +from mpl_toolkits.axes_grid1 import make_axes_locatable + +fig, axs = plt.subplots(2, 3, figsize=(8.5, 7)) + +for i, (ax, target) in enumerate(zip(axs.ravel(), ['cadmium', 'copper', 'lead', 'zinc', 'om'])): + ax.set_title(target.title()) + divider = make_axes_locatable(ax) + cax = divider.append_axes("right", size="10%", pad=0.05) + training_pts.plot(column=target, legend=True, ax=ax, cax=cax, cmap='viridis') + + if i != 0: + ax.set_yticklabels([]) + + if i != 3: + ax.set_xticklabels([]) + else: + ax.tick_params(axis='x', labelrotation=65) + +fig.delaxes(axs.flatten()[i+1]) +plt.tight_layout() +plt.show() +``` + +## Extract Raster Values at the Training Point Locations + +Pixel values from a Raster object can be extracted using geometries within a geopandas.GeoDataFrame (points, lines, polygons) or by using labelled pixels from another raster with the same dimensions and crs. 
+ +By default the extracted values are returned as a geopandas.GeoDataFrame that contains the data and the coordinates of the pixels: + +```{python} +training_df = stack.extract_vector(gdf=training_pts) + +training_df.index = training_df.index.get_level_values("geometry_idx") +training_df = training_df.merge( + training_pts.loc[:, ("lead", "cadmium", "copper", "zinc", "om")], + left_index=True, + right_index=True +) +``` + +```{python} +training_df = training_df.dropna() +training_df.head() +``` + +## Developing a Machine Learning Model + +Here we are going to create a machine learning pipeline that correctly handles categorical predictors via one-hot encoding: + +```{python} +stack.names +``` + +```{python} +from sklearn.pipeline import Pipeline +from sklearn.ensemble import ExtraTreesRegressor +from sklearn.preprocessing import OneHotEncoder +from sklearn.compose import ColumnTransformer + +soil_idx = [i for i, name in enumerate(stack.names) if name == 'soil'] + +trans = ColumnTransformer([ + ('ohe', OneHotEncoder(categories='auto', handle_unknown='ignore'), soil_idx) + ], remainder='passthrough') + +et = ExtraTreesRegressor(n_estimators=500, n_jobs=-1, random_state=1234) +et = Pipeline([ + ('preproc', trans), + ('regressor', et)]) +``` + +Now we can separate our response and predictor variables and train the model: + +```{python} +X = training_df.loc[:, stack.names] +y = training_df.loc[:, ['lead', 'cadmium', 'copper', 'zinc', 'om']] +et.fit(X, y) +``` + +To evaluate the performance of the model, we will use 10-fold cross validation: + +```{python} +from sklearn.model_selection import cross_validate, KFold + +outer = KFold(n_splits=10, shuffle=True, random_state=1234) +scores = cross_validate(et, X, y, scoring='neg_mean_squared_error', cv=10, n_jobs=1) +rmse = np.sqrt(-scores['test_score']).mean() + +print("Our RMSE score is {}".format(rmse)) +``` + +## Feature Importances + +```{python} +ohe_names = deepcopy(list(stack.names)) +ohe_names.insert(soil_idx[0], 
'soil1') +ohe_names.insert(soil_idx[0], 'soil2') +ohe_names = np.array(ohe_names) +``` + +```{python} +mpl.style.use('ggplot') + +fimp = et.named_steps['regressor'].feature_importances_ + +fig, ax = plt.subplots(figsize=(4, 6)) +ax.barh(y=ohe_names[fimp.argsort()], width=fimp[fimp.argsort()]) +ax.set_xlabel('Feature Importance Score') +plt.show() +``` + +## Prediction on the Raster object + +```{python} +preds = stack.predict(et) +preds.rename( + {old: new for old, new in zip(preds.names, ['lead', 'cadmium', 'copper', 'zinc', 'om'])}, + in_place=True +) +preds.lead.cmap = 'rainbow' +preds.cadmium.cmap = 'rainbow' +preds.copper.cmap = 'rainbow' +preds.zinc.cmap = 'rainbow' +preds.om.cmap = 'rainbow' +``` + +Plot the results: + +```{python} +preds.plot(out_shape=(200, 200), title_fontsize=14, figsize=(10, 8)) +plt.show() +``` diff --git a/docs/plotting.qmd b/docs/plotting.qmd index ac14ee0..ccfb02c 100644 --- a/docs/plotting.qmd +++ b/docs/plotting.qmd @@ -1,69 +1,69 @@ ---- -title: "Plotting" -format: - html: - code-fold: false - toc: true -jupyter: python3 ---- - -Both `Raster` and `RasterLayer` objects include basic plotting methods. The -plot method for a `RasterLayer` object produces a single raster plot using the -`matplotlib.pyplot.imshow` method. 
- -For convenience, plot settings such as color ramps and stretches can also be -set for each RasterLayer using the `RasterLayer.cmap` that support matplotlib -cmap's, and the `RasterLayer.norm` attribute to associate a -`matplotlib.colors.Normalize` stretch with each RasterLayer: - -To plot a single RasterLayer: - -```{python} -from pyspatialml import Raster -from pyspatialml.datasets import nc -import matplotlib.pyplot as plt - -stack = Raster([nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]) - -# set RasterLayer color table -stack.lsat7_2000_10.cmap = "plasma" - -# plot a single layer using an existing axis -fig, ax = plt.subplots() -stack.lsat7_2000_10.plot(ax=ax) -plt.show() -``` - -For RasterLayers that represent categorical data types, e.g. land cover, then -the `RasterLayer.categorical=True` attribute will cause the cmap to be -converted to a discrete scale. - -The default plot method for a `Raster` object produces a raster-matrix plot of -the individual RasterLayers. By default this plot preserves the plotting -attributes of the individual rasters: - -Plot all RasterLayers in a Raster object: - -```{python} -stack.lsat7_2000_10.cmap = "Blues" -stack.lsat7_2000_20.cmap = "Greens" -stack.lsat7_2000_30.cmap = "Reds" -stack.lsat7_2000_40.cmap = "RdPu" -stack.lsat7_2000_50.cmap = "autumn" -stack.lsat7_2000_70.cmap = "hot" - -stack.plot( - title_fontsize=8, - label_fontsize=6, - legend_fontsize=6, - names=["B1", "B2", "B3", "B4", "B5", "B7"], - fig_kwds={"figsize": (8, 4)}, - subplots_kwds={"wspace": 0.3} -) -plt.show() -``` - -The `Raster.plot` method also provides `cmap` and `norm` arguments that can be -used to override the settings of the individual RasterLayers. Additional -settings can be passed to control plot layout using the `figure_kwds`, -`legend_kwds` and `subplots_kwds` arguments. 
+--- +title: "Plotting" +format: + html: + code-fold: false + toc: true +jupyter: python3 +--- + +Both `Raster` and `RasterLayer` objects include basic plotting methods. The +plot method for a `RasterLayer` object produces a single raster plot using the +`matplotlib.pyplot.imshow` method. + +For convenience, plot settings such as color ramps and stretches can also be +set for each RasterLayer using the `RasterLayer.cmap` that support matplotlib +cmap's, and the `RasterLayer.norm` attribute to associate a +`matplotlib.colors.Normalize` stretch with each RasterLayer: + +To plot a single RasterLayer: + +```{python} +from pyspatialml import Raster +from pyspatialml.datasets import nc +import matplotlib.pyplot as plt + +stack = Raster([nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7]) + +# set RasterLayer color table +stack.lsat7_2000_10.cmap = "plasma" + +# plot a single layer using an existing axis +fig, ax = plt.subplots() +stack.lsat7_2000_10.plot(ax=ax) +plt.show() +``` + +For RasterLayers that represent categorical data types, e.g. land cover, then +the `RasterLayer.categorical=True` attribute will cause the cmap to be +converted to a discrete scale. + +The default plot method for a `Raster` object produces a raster-matrix plot of +the individual RasterLayers. 
By default this plot preserves the plotting +attributes of the individual rasters: + +Plot all RasterLayers in a Raster object: + +```{python} +stack.lsat7_2000_10.cmap = "Blues" +stack.lsat7_2000_20.cmap = "Greens" +stack.lsat7_2000_30.cmap = "Reds" +stack.lsat7_2000_40.cmap = "RdPu" +stack.lsat7_2000_50.cmap = "autumn" +stack.lsat7_2000_70.cmap = "hot" + +stack.plot( + title_fontsize=8, + label_fontsize=6, + legend_fontsize=6, + names=["B1", "B2", "B3", "B4", "B5", "B7"], + fig_kwds={"figsize": (8, 4)}, + subplots_kwds={"wspace": 0.3} +) +plt.show() +``` + +The `Raster.plot` method also provides `cmap` and `norm` arguments that can be +used to override the settings of the individual RasterLayers. Additional +settings can be passed to control plot layout using the `figure_kwds`, +`legend_kwds` and `subplots_kwds` arguments. diff --git a/docs/sampling.qmd b/docs/sampling.qmd index 29a08a9..3073e9a 100644 --- a/docs/sampling.qmd +++ b/docs/sampling.qmd @@ -1,43 +1,43 @@ ---- -title: "Random Sampling" -format: - html: - code-fold: false - toc: true -jupyter: python3 ---- - -## Random Uniform Sampling - -For many spatial models, it is common to take a random sample of the -predictors to represent a single class (i.e. an environmental background or -pseudo-absences in a binary classification model). The sample function is -supplied in the sampling module for this purpose: - -```{python} -from pyspatialml import Raster -import pyspatialml.datasets.nc as nc -import matplotlib.pyplot as plt - -predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] -stack = Raster(predictors) - -# extract training data using a random sample -df_rand = stack.sample(size=1000, random_state=1) -df_rand.plot() -``` - -## Stratified Random Sampling - -The sample function also enables stratified random sampling based on passing a -categorical raster dataset to the strata argument. 
The categorical raster -should spatially overlap with the dataset to be sampled, but it does not need -to be of the same grid resolution. This raster should be passed as a opened -rasterio dataset: - -```{python} -strata = Raster(nc.strata) -df_strata = stack.sample(size=5, strata=strata, random_state=1) -df_strata = df_strata.dropna() -df_strata +--- +title: "Random Sampling" +format: + html: + code-fold: false + toc: true +jupyter: python3 +--- + +## Random Uniform Sampling + +For many spatial models, it is common to take a random sample of the +predictors to represent a single class (i.e. an environmental background or +pseudo-absences in a binary classification model). The sample function is +supplied in the sampling module for this purpose: + +```{python} +from pyspatialml import Raster +import pyspatialml.datasets.nc as nc +import matplotlib.pyplot as plt + +predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] +stack = Raster(predictors) + +# extract training data using a random sample +df_rand = stack.sample(size=1000, random_state=1) +df_rand.plot() +``` + +## Stratified Random Sampling + +The sample function also enables stratified random sampling based on passing a +categorical raster dataset to the strata argument. The categorical raster +should spatially overlap with the dataset to be sampled, but it does not need +to be of the same grid resolution. 
This raster should be passed as an opened +rasterio dataset: + +```{python} +strata = Raster(nc.strata) +df_strata = stack.sample(size=5, strata=strata, random_state=1) +df_strata = df_strata.dropna() +df_strata ``` \ No newline at end of file diff --git a/docs/spatial-features.qmd b/docs/spatial-features.qmd index 20bf887..5b00189 100644 --- a/docs/spatial-features.qmd +++ b/docs/spatial-features.qmd @@ -1,151 +1,151 @@ ---- -title: "Incorporating Spatial Autocorrelation into Spatial Predictions" -format: - html: - code-fold: false -jupyter: python3 ---- - -Similarly to example 1, we are using the meuse dataset again to perform a multi-target prediction of soil properties using a regression model. However, in this case we will attempt to account for spatial autocorrelation in the model directly by generating new features that are based on the distance-weighted means of surrounding spatial locations. - -```{python} -import geopandas as gpd -import numpy as np -from tempfile import NamedTemporaryFile -from pyspatialml import Raster -import pyspatialml.datasets.meuse as ms -import matplotlib as mpl -import matplotlib.pyplot as plt -``` - -## Preparing the Raster Predictors - -Import the raster predictors from the `pyspatialml.datasets.meuse` module: - -```{python} -predictor_files = ms.predictors -training_pts_file = ms.meuse -stack = Raster(predictor_files) -stack.drop('ffreq') -``` - -In order to generate new features from surrounding spatial locations, we need their x,y coordinates, which will will add to the stack of the raster predictors using the `pyspatialml.preprocessing.xy_coordinates` function: - -```{python} -from pyspatialml.preprocessing import xy_coordinates - -xy_layers = xy_coordinates(stack.iloc[0], NamedTemporaryFile(suffix=".tif").name) -stack = stack.append(xy_layers, in_place=False) -``` - -Quickly plot the raster predictors: - -```{python} -mpl.style.use('seaborn-v0_8') -axs = stack.plot(figsize=(9, 7)) -ax = axs.flatten()[10] -im = ax.images 
-im[0].colorbar.set_ticks([1,2,3]) -ax = axs.flatten()[8] -ax.tick_params(axis='x', labelrotation=65) - -plt.tight_layout() -plt.show() -``` - -## Extract the Training Data - -Spatially query the raster predictors at the training point locations: - -```{python} -training_pts = gpd.read_file(training_pts_file) -training_df = stack.extract_vector(gdf=training_pts) - -training_df.index = training_df.index.get_level_values("geometry_idx") -training_df = training_df.merge( - training_pts.loc[:, ("lead", "cadmium", "copper", "zinc", "om")], - left_index=True, - right_index=True -) -training_df = training_df.dropna() -``` - -Split the response/target variables from the predictors: - -```{python} -X = training_df.loc[:, stack.names].values -y = training_df.loc[:, ['lead', 'cadmium', 'copper', 'zinc', 'om']].values -``` - -## Develop a Spatially-Lagged Machine Learning Model - -As well as using the ExtraTreeRegressor model which was also used in example 1, here we will use the custom `pyspatialml.estimators.SpatialLagRegressor` metalearner class to wrap the extratrees regressor into a model that adds a new feature based on the distance-weighted mean of spatially-proximal observations: - -```{python} -from sklearn.pipeline import Pipeline -from sklearn.ensemble import ExtraTreesRegressor -from sklearn.preprocessing import OneHotEncoder -from sklearn.compose import ColumnTransformer -from pyspatialml.transformers import KNNTransformer -from sklearn.model_selection import cross_validate, KFold -from sklearn.model_selection import GridSearchCV - -# define regressor -et = ExtraTreesRegressor(n_estimators=500, n_jobs=-1, random_state=1234) - -soil_index = list(stack.names).index("soil") -xy_indexes = [list(stack.names).index(i) for i in ["x_coordinates", "y_coordinates"]] - -preproc = ColumnTransformer([ - ('ohe', OneHotEncoder(categories='auto', handle_unknown='ignore'), [soil_index]), - ('lags', KNNTransformer(weights='distance', measure="mean"), xy_indexes) -], 
remainder='passthrough') - -wflow = Pipeline([ - ('preproc', preproc), - ('regressor', et) -]) - -search_grid = {"preproc__lags__n_neighbors": [3, 5, 7, 9]} -inner = KFold(n_splits=3, shuffle=True, random_state=1234) -model = GridSearchCV(wflow, param_grid=search_grid, cv=inner, scoring="r2") -``` - -Fit the model and cross-validate: - -```{python} -model = model.fit(X, y) -model.best_params_ -``` - -```{python} -outer = KFold(n_splits=10, shuffle=True, random_state=1234) - -scores = cross_validate(model, X, y, scoring='neg_mean_squared_error', cv=outer, n_jobs=1) -rmse = np.sqrt(-scores['test_score']).mean() - -print("Our RMSE score is {}".format(rmse)) -``` - -Comparing the RMSE score the the score obtained in example 1, where the spatial structure of the training data was accounted for indirectly by added a variety of raster distance measures, we can see that the RMSE score is slightly improved. - -## Multi-Target Predictions - -```{python} -preds = stack.predict(model) -preds.rename( - {old: new for old, new in zip(preds.names, ['lead', 'cadmium', 'copper', 'zinc', 'om'])}, - in_place=True -) -preds.lead.cmap = 'rainbow' -preds.cadmium.cmap = 'rainbow' -preds.copper.cmap = 'rainbow' -preds.zinc.cmap = 'rainbow' -preds.om.cmap = 'rainbow' -``` - -```{python} -preds.plot(out_shape=(200, 200), title_fontsize=14, figsize=(10, 8)) -plt.show() -``` - +--- +title: "Incorporating Spatial Autocorrelation into Spatial Predictions" +format: + html: + code-fold: false +jupyter: python3 +--- + +Similarly to example 1, we are using the meuse dataset again to perform a multi-target prediction of soil properties using a regression model. However, in this case we will attempt to account for spatial autocorrelation in the model directly by generating new features that are based on the distance-weighted means of surrounding spatial locations. 
+ +```{python} +import geopandas as gpd +import numpy as np +from tempfile import NamedTemporaryFile +from pyspatialml import Raster +import pyspatialml.datasets.meuse as ms +import matplotlib as mpl +import matplotlib.pyplot as plt +``` + +## Preparing the Raster Predictors + +Import the raster predictors from the `pyspatialml.datasets.meuse` module: + +```{python} +predictor_files = ms.predictors +training_pts_file = ms.meuse +stack = Raster(predictor_files) +stack.drop('ffreq') +``` + +In order to generate new features from surrounding spatial locations, we need their x,y coordinates, which we will add to the stack of the raster predictors using the `pyspatialml.preprocessing.xy_coordinates` function: + +```{python} +from pyspatialml.preprocessing import xy_coordinates + +xy_layers = xy_coordinates(stack.iloc[0], NamedTemporaryFile(suffix=".tif").name) +stack = stack.append(xy_layers, in_place=False) +``` + +Quickly plot the raster predictors: + +```{python} +mpl.style.use('seaborn-v0_8') +axs = stack.plot(figsize=(9, 7)) +ax = axs.flatten()[10] +im = ax.images +im[0].colorbar.set_ticks([1,2,3]) +ax = axs.flatten()[8] +ax.tick_params(axis='x', labelrotation=65) + +plt.tight_layout() +plt.show() +``` + +## Extract the Training Data + +Spatially query the raster predictors at the training point locations: + +```{python} +training_pts = gpd.read_file(training_pts_file) +training_df = stack.extract_vector(gdf=training_pts) + +training_df.index = training_df.index.get_level_values("geometry_idx") +training_df = training_df.merge( + training_pts.loc[:, ("lead", "cadmium", "copper", "zinc", "om")], + left_index=True, + right_index=True +) +training_df = training_df.dropna() +``` + +Split the response/target variables from the predictors: + +```{python} +X = training_df.loc[:, stack.names].values +y = training_df.loc[:, ['lead', 'cadmium', 'copper', 'zinc', 'om']].values +``` + +## Develop a Spatially-Lagged Machine Learning Model + +As well as using the 
ExtraTreeRegressor model which was also used in example 1, here we will use the custom `pyspatialml.estimators.SpatialLagRegressor` metalearner class to wrap the extratrees regressor into a model that adds a new feature based on the distance-weighted mean of spatially-proximal observations: + +```{python} +from sklearn.pipeline import Pipeline +from sklearn.ensemble import ExtraTreesRegressor +from sklearn.preprocessing import OneHotEncoder +from sklearn.compose import ColumnTransformer +from pyspatialml.transformers import KNNTransformer +from sklearn.model_selection import cross_validate, KFold +from sklearn.model_selection import GridSearchCV + +# define regressor +et = ExtraTreesRegressor(n_estimators=500, n_jobs=-1, random_state=1234) + +soil_index = list(stack.names).index("soil") +xy_indexes = [list(stack.names).index(i) for i in ["x_coordinates", "y_coordinates"]] + +preproc = ColumnTransformer([ + ('ohe', OneHotEncoder(categories='auto', handle_unknown='ignore'), [soil_index]), + ('lags', KNNTransformer(weights='distance', measure="mean"), xy_indexes) +], remainder='passthrough') + +wflow = Pipeline([ + ('preproc', preproc), + ('regressor', et) +]) + +search_grid = {"preproc__lags__n_neighbors": [3, 5, 7, 9]} +inner = KFold(n_splits=3, shuffle=True, random_state=1234) +model = GridSearchCV(wflow, param_grid=search_grid, cv=inner, scoring="r2") +``` + +Fit the model and cross-validate: + +```{python} +model = model.fit(X, y) +model.best_params_ +``` + +```{python} +outer = KFold(n_splits=10, shuffle=True, random_state=1234) + +scores = cross_validate(model, X, y, scoring='neg_mean_squared_error', cv=outer, n_jobs=1) +rmse = np.sqrt(-scores['test_score']).mean() + +print("Our RMSE score is {}".format(rmse)) +``` + +Comparing the RMSE score to the score obtained in example 1, where the spatial structure of the training data was accounted for indirectly by adding a variety of raster distance measures, we can see that the RMSE score is slightly improved. 
+ +## Multi-Target Predictions + +```{python} +preds = stack.predict(model) +preds.rename( + {old: new for old, new in zip(preds.names, ['lead', 'cadmium', 'copper', 'zinc', 'om'])}, + in_place=True +) +preds.lead.cmap = 'rainbow' +preds.cadmium.cmap = 'rainbow' +preds.copper.cmap = 'rainbow' +preds.zinc.cmap = 'rainbow' +preds.om.cmap = 'rainbow' +``` + +```{python} +preds.plot(out_shape=(200, 200), title_fontsize=14, figsize=(10, 8)) +plt.show() +``` + diff --git a/docs/transformers.qmd b/docs/transformers.qmd index f92faed..8f4f9d6 100644 --- a/docs/transformers.qmd +++ b/docs/transformers.qmd @@ -1,67 +1,67 @@ ---- -title: "Transformers" -format: - html: - code-fold: false - toc: true -jupyter: python3 ---- - -The transformers module contains classes that are used for spatial feature engineering. - -## Spatial Lag Transformer - -A transformer to create spatial lag variables by using a -weighted mean/mode of the values of the K-neighboring observations. The -weighted mean/mode of the surrounding observations are appended as a new -feature to the right-most column in the training data. The `measure` parameter -should be set to 'mode' for classification, and 'mean' for regression. - -``` -KNNTransformer( - n_neighbors=7, - weights="distance", - measure="mean", - radius=1.0, - algorithm="auto", - leaf_size=30, - metric="minkowski", - p=2, - normalize=True, - metric_params=None, - kernel_params=None, - n_jobs=1 -) -``` - -## GeoDistTransformer - -A common spatial feature engineering task is to create new features that -describe the proximity to some reference locations. The GeoDistTransformer -can be used to add these features as part of a machine learning pipeline. - -``` -GeoDistTransformer(refs, log=False) -``` - -Where `refs` are an array of coordinates of reference locations in -(m, n-dimensional) order, such as -{n_locations, x_coordinates, y_coordinates, ...} for as many dimensions as -required. 
For example to calculate distances to a single x,y,z location: - -``` -refs = [-57.345, -110.134, 1012] -``` - -And to calculate distances to three x,y reference locations: - -``` -refs = [ - [-57.345, -110.134], - [-56.345, -109.123], - [-58.534, -112.123] -] -``` - -The supplied array has to have at least x,y coordinates with a -(1, 2) shape for a single location. +--- +title: "Transformers" +format: + html: + code-fold: false + toc: true +jupyter: python3 +--- + +The transformers module contains classes that are used for spatial feature engineering. + +## Spatial Lag Transformer + +A transformer to create spatial lag variables by using a +weighted mean/mode of the values of the K-neighboring observations. The +weighted mean/mode of the surrounding observations are appended as a new +feature to the right-most column in the training data. The `measure` parameter +should be set to 'mode' for classification, and 'mean' for regression. + +``` +KNNTransformer( + n_neighbors=7, + weights="distance", + measure="mean", + radius=1.0, + algorithm="auto", + leaf_size=30, + metric="minkowski", + p=2, + normalize=True, + metric_params=None, + kernel_params=None, + n_jobs=1 +) +``` + +## GeoDistTransformer + +A common spatial feature engineering task is to create new features that +describe the proximity to some reference locations. The GeoDistTransformer +can be used to add these features as part of a machine learning pipeline. + +``` +GeoDistTransformer(refs, log=False) +``` + +Where `refs` are an array of coordinates of reference locations in +(m, n-dimensional) order, such as +{n_locations, x_coordinates, y_coordinates, ...} for as many dimensions as +required. 
For example to calculate distances to a single x,y,z location: + +``` +refs = [-57.345, -110.134, 1012] +``` + +And to calculate distances to three x,y reference locations: + +``` +refs = [ + [-57.345, -110.134], + [-56.345, -109.123], + [-58.534, -112.123] +] +``` + +The supplied array has to have at least x,y coordinates with a +(1, 2) shape for a single location. diff --git a/docs/usage.qmd b/docs/usage.qmd index 6c949c8..80843d8 100644 --- a/docs/usage.qmd +++ b/docs/usage.qmd @@ -1,32 +1,32 @@ ---- -title: "Usage" -format: html ---- - -## The Raster class - -The main approach to working with raster datasets in Pyspatialml is through the `Raster` class. The `Raster` object takes a list of GDAL-supported raster datasets and references them as part of a single Raster object, which can be used to perform operations on the raster datasets as a whole. The `Raster` object is a thin wrapper around the `rasterio` library, which is a Python library for reading and writing raster datasets. The individual bands within the datasets are represented internally as `RasterLayer` objects. This allows for retaining metadata about each raster dataset and adding or removing raster datasets from the stack without making physical changes to the disk. - -Note that in order to initiate a Raster object, the underlying raster datasets must be spatially aligned in terms of their extent, resolution, and coordinate reference system - Raster objects do not perform any resampling or reprojection of the underlying datasets. Functions within the `preprocessing` module can be used to align raster datasets before creating a Raster object. - -### Creating a Raster - -The most common approach of initiating a Raster object is from an existing raster dataset, or a list of raster datasets. 
Alternatively, a Raster object can also be initiated from a 3D numpy array: - -- ``Raster(src=[raster1.tif, raster2.tif, raster3.tif])`` creates a Raster object from existing file-based GDAL-supported datasets, or a single raster dataset. The file-based datasets can contain single or multiple bands. - -- ``Raster(src=new_numpy_array, crs=crs, transform=transform)`` creates a Raster object from a 3D numpy array (band, row, column). The ``crs`` and ``transform`` arguments are optional but are required to provide coordinate reference system information to the Raster object. The crs argument has to be represented by ```rasterio crs.CRS``` object, and the transform parameter requires an ```affine.Affine``` object. - -Rasters can also be initated directly from a `rasterio.Band` object(s), or from a list of `RasterLayer` objects (see below). - -### RasterLayers - -Generally, Pyspatialml intends users to work with the Raster object. However, internally, the Raster object is composed of RasterLayer objects, which represent individual bands of a raster dataset. RasterLayers are based on a ``rasterio.band`` object with some additional attributes and methods. However, unlike the `rasterio.Band.ds.read` method which reads all bands within a multi-band dataset, the RasterLayer read method always refers to a single band. - -Methods contained within RasterLayer objects are specifically designed to be applied to individual bands of a raster. These methods include operations such as sieve-clump, distance to non-NaN pixels, and arithmetic operations on individual layers. - -## Principles of working with Rasters - -Methods that are applied to Raster objects are generally designed to be applied to the entire stack of raster datasets. For example, the `crop` method will crop all raster datasets in the stack to a common extent, and the `mask` method will apply a mask to all raster datasets in the stack. 
These methods always return a new Raster object, and do not modify the original Raster object by default. Subsetting of individual bands uses the same principles as the `pandas` library, where the `loc` method is used to subset bands based on their names, and the `iloc` method is used to subset bands based on their index. Also similarly to `pandas`, subsetting a single band will return the object itself, in this case, a RasterLayer object, while subsetting multiple bands will return a new Raster object. - -Methods that apply to individual RasterLayers are mostly related to extracting or summarizing metadata from the individual bands. For other methods that users may want to apply to individual bands, it is recommended to work with `rasterio` directly. +--- +title: "Usage" +format: html +--- + +## The Raster class + +The main approach to working with raster datasets in Pyspatialml is through the `Raster` class. The `Raster` object takes a list of GDAL-supported raster datasets and references them as part of a single Raster object, which can be used to perform operations on the raster datasets as a whole. The `Raster` object is a thin wrapper around the `rasterio` library, which is a Python library for reading and writing raster datasets. The individual bands within the datasets are represented internally as `RasterLayer` objects. This allows for retaining metadata about each raster dataset and adding or removing raster datasets from the stack without making physical changes to the disk. + +Note that in order to initiate a Raster object, the underlying raster datasets must be spatially aligned in terms of their extent, resolution, and coordinate reference system - Raster objects do not perform any resampling or reprojection of the underlying datasets. Functions within the `preprocessing` module can be used to align raster datasets before creating a Raster object. 
+
+### Creating a Raster
+
+The most common approach of initiating a Raster object is from an existing raster dataset, or a list of raster datasets. Alternatively, a Raster object can also be initiated from a 3D numpy array:
+
+- ``Raster(src=[raster1.tif, raster2.tif, raster3.tif])`` creates a Raster object from existing file-based GDAL-supported datasets, or a single raster dataset. The file-based datasets can contain single or multiple bands.
+
+- ``Raster(src=new_numpy_array, crs=crs, transform=transform)`` creates a Raster object from a 3D numpy array (band, row, column). The ``crs`` and ``transform`` arguments are optional but are required to provide coordinate reference system information to the Raster object. The crs argument has to be represented by ```rasterio crs.CRS``` object, and the transform parameter requires an ```affine.Affine``` object.
+
+Rasters can also be initiated directly from a `rasterio.Band` object(s), or from a list of `RasterLayer` objects (see below).
+
+### RasterLayers
+
+Generally, Pyspatialml intends users to work with the Raster object. However, internally, the Raster object is composed of RasterLayer objects, which represent individual bands of a raster dataset. RasterLayers are based on a ``rasterio.band`` object with some additional attributes and methods. However, unlike the `rasterio.Band.ds.read` method which reads all bands within a multi-band dataset, the RasterLayer read method always refers to a single band.
+
+Methods contained within RasterLayer objects are specifically designed to be applied to individual bands of a raster. These methods include operations such as sieve-clump, distance to non-NaN pixels, and arithmetic operations on individual layers.
+
+## Principles of working with Rasters
+
+Methods that are applied to Raster objects are generally designed to be applied to the entire stack of raster datasets. 
For example, the `crop` method will crop all raster datasets in the stack to a common extent, and the `mask` method will apply a mask to all raster datasets in the stack. These methods always return a new Raster object, and do not modify the original Raster object by default. Subsetting of individual bands uses the same principles as the `pandas` library, where the `loc` method is used to subset bands based on their names, and the `iloc` method is used to subset bands based on their index. Also similarly to `pandas`, subsetting a single band will return the object itself, in this case, a RasterLayer object, while subsetting multiple bands will return a new Raster object. + +Methods that apply to individual RasterLayers are mostly related to extracting or summarizing metadata from the individual bands. For other methods that users may want to apply to individual bands, it is recommended to work with `rasterio` directly. diff --git a/index.qmd b/index.qmd index 775313d..947a845 100644 --- a/index.qmd +++ b/index.qmd @@ -1,55 +1,55 @@ ---- -title: "Overview" -format: html ---- - -Pyspatialml is a Python package for applying scikit-learn machine learning -models to raster-based datasets. It is inspired by the famous -[raster](https://cran.r-project.org/web/packages/raster/index.html) -package in the R statistical programming language which has been extensively -used for applying statistical and machine learning models to geospatial raster -datasets. - -Pyspatialml includes functions and classes for working with multiple raster -datasets and applying typical machine learning workflows including raster data -manipulation, feature engineering on raster datasets, extraction of training -data, and application of the ``predict`` or ``predict_proba`` methods of -scikit-learn estimator objects to a stack of raster datasets. 
- -Pyspatialml is built upon the -[rasterio](https://rasterio.readthedocs.io/en/latest/) Python package which -performs all of the heavy lifting and is designed to work with the -[geopandas](https://geopandas.org) package for related raster-vector data -geoprocessing operations. - -## Purpose - -A supervised machine-learning workflow as applied to spatial raster data -typically involves several steps: - -1. Using vector features or labelled pixels to extract training data from a - stack of raster-based predictors (e.g. spectral bands, terrain derivatives, - or climate grids). The training data represent locations when some - property/state/concentration is already established, and might comprise - point locations of arsenic concentrations, or labelled pixels with - integer-encoded values that correspond to known landcover types. - -2. Developing a machine learning classification or regression model on the - training data. Pyspatialml is designed to use scikit-learn compatible api's - for this purpose. -3. Applying the fitted machine learning model to make predictions on all of - the pixels in the stack of raster data. - -Pyspatialml is designed to make it easy to develop spatial prediction models on -stacks of 2D raster datasets that are held on disk. Unlike using python's -``numpy`` module directly where raster datasets need to be held in memory, the -majority of functions within pyspatialml work with raster datasets that are -stored on disk and allow processing operations to be performed on datasets that -are too large to be loaded into memory. - -Pyspatialml is designed to make it easy to work with typical raster data stacks -consisting of multiple 2D grids such as different spectal bands, maps etc. -However, it's purpose is not to work with multidimensional datasets, i.e. those -that have more than 3 dimensions such as spacetime cubes of multiband data. 
The -[xarray](http://xarray.pydata.org/en/stable/index.html) package can provide a -structure for this type of data. +--- +title: "Overview" +format: html +--- + +Pyspatialml is a Python package for applying scikit-learn machine learning +models to raster-based datasets. It is inspired by the famous +[raster](https://cran.r-project.org/web/packages/raster/index.html) +package in the R statistical programming language which has been extensively +used for applying statistical and machine learning models to geospatial raster +datasets. + +Pyspatialml includes functions and classes for working with multiple raster +datasets and applying typical machine learning workflows including raster data +manipulation, feature engineering on raster datasets, extraction of training +data, and application of the ``predict`` or ``predict_proba`` methods of +scikit-learn estimator objects to a stack of raster datasets. + +Pyspatialml is built upon the +[rasterio](https://rasterio.readthedocs.io/en/latest/) Python package which +performs all of the heavy lifting and is designed to work with the +[geopandas](https://geopandas.org) package for related raster-vector data +geoprocessing operations. + +## Purpose + +A supervised machine-learning workflow as applied to spatial raster data +typically involves several steps: + +1. Using vector features or labelled pixels to extract training data from a + stack of raster-based predictors (e.g. spectral bands, terrain derivatives, + or climate grids). The training data represent locations when some + property/state/concentration is already established, and might comprise + point locations of arsenic concentrations, or labelled pixels with + integer-encoded values that correspond to known landcover types. + +2. Developing a machine learning classification or regression model on the + training data. Pyspatialml is designed to use scikit-learn compatible api's + for this purpose. +3. 
Applying the fitted machine learning model to make predictions on all of
+   the pixels in the stack of raster data.
+
+Pyspatialml is designed to make it easy to develop spatial prediction models on
+stacks of 2D raster datasets that are held on disk. Unlike using python's
+``numpy`` module directly where raster datasets need to be held in memory, the
+majority of functions within pyspatialml work with raster datasets that are
+stored on disk and allow processing operations to be performed on datasets that
+are too large to be loaded into memory.
+
+Pyspatialml is designed to make it easy to work with typical raster data stacks
+consisting of multiple 2D grids such as different spectral bands, maps etc.
+However, its purpose is not to work with multidimensional datasets, i.e. those
+that have more than 3 dimensions such as spacetime cubes of multiband data. The
+[xarray](http://xarray.pydata.org/en/stable/index.html) package can provide a
+structure for this type of data.
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index a8da50b..0000000
--- a/poetry.lock
+++ /dev/null
@@ -1,3756 +0,0 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
- -[[package]] -name = "affine" -version = "2.4.0" -description = "Matrices describing affine transformation of the plane" -optional = false -python-versions = ">=3.7" -files = [ - {file = "affine-2.4.0-py3-none-any.whl", hash = "sha256:8a3df80e2b2378aef598a83c1392efd47967afec4242021a0b06b4c7cbc61a92"}, - {file = "affine-2.4.0.tar.gz", hash = "sha256:a24d818d6a836c131976d22f8c27b8d3ca32d0af64c1d8d29deb7bafa4da1eea"}, -] - -[package.extras] -dev = ["coveralls", "flake8", "pydocstyle"] -test = ["pytest (>=4.6)", "pytest-cov"] - -[[package]] -name = "annotated-types" -version = "0.6.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, -] - -[[package]] -name = "anyio" -version = "4.3.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] 
-name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "argon2-cffi" -version = "23.1.0" -description = "Argon2 for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, -] - -[package.dependencies] -argon2-cffi-bindings = "*" - -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -description = "Low-level CFFI bindings for Argon2" -optional = false -python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - -[[package]] -name = "arrow" -version = "1.3.0" -description = "Better dates & times for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" -types-python-dateutil = ">=2.8.10" - -[package.extras] -doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" 
-optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "async-lru" -version = "2.0.4" -description = "Simple LRU cache for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - -[[package]] -name = "babel" -version = "2.15.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.8" 
-files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, -] - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "beartype" -version = "0.18.5" -description = "Unbearably fast runtime type checking in pure Python." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "beartype-0.18.5-py3-none-any.whl", hash = "sha256:5301a14f2a9a5540fe47ec6d34d758e9cd8331d36c4760fc7a5499ab86310089"}, - {file = "beartype-0.18.5.tar.gz", hash = "sha256:264ddc2f1da9ec94ff639141fbe33d22e12a9f75aa863b83b7046ffff1381927"}, -] - -[package.extras] -all = ["typing-extensions (>=3.10.0.0)"] -dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "equinox", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] -doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] -test-tox = ["equinox", "mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] -test-tox-coverage = ["coverage (>=5.5)"] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = 
["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "black" -version = "24.4.2" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = 
"black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "bleach" -version = "6.1.0" -description = "An easy safelist-based HTML-sanitizing 
tool." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, -] - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] - -[[package]] -name = "certifi" -version = "2024.7.4" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, -] - -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = 
"cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = 
"cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "click-plugins" -version = "1.1.1" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
-optional = false -python-versions = "*" -files = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] - -[[package]] -name = "cligj" -version = "0.7.2" -description = "Click params for commmand line interfaces to GeoJSON" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, <4" -files = [ - {file = "cligj-0.7.2-py3-none-any.whl", hash = "sha256:c1ca117dbce1fe20a5809dc96f01e1c2840f6dcc939b3ddbb1111bf330ba82df"}, - {file = "cligj-0.7.2.tar.gz", hash = "sha256:a4bc13d623356b373c2c27c53dbd9c68cae5d526270bfa71f6c6fa69669c6b27"}, -] - -[package.dependencies] -click = ">=4.0" - -[package.extras] -test = ["pytest-cov"] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "contourpy" -version = "1.2.1" -description = "Python library for calculating contours of 2D quadrilateral grids" -optional = false -python-versions = ">=3.9" -files = [ - {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, - {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, - {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, - {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, - {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, - {file = 
"contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, - {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, - {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, - {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, - {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, - {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, - {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, - {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, - {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, - {file = 
"contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, - {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, - {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, - {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, - {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, - {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, - {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, - {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, - {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, - {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, - {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, - {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, -] - -[package.dependencies] -numpy = ">=1.20" - -[package.extras] -bokeh = ["bokeh", "selenium"] -docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] -mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] -test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] -test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] - -[[package]] -name = "coverage" -version = "7.5.3" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = 
"coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = 
"coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cycler" -version = "0.12.1" -description = "Composable style cycles" -optional = false -python-versions = ">=3.8" -files = [ - {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, - {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, -] - -[package.extras] -docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] -tests = ["pytest", "pytest-cov", "pytest-xdist"] - -[[package]] -name = "debugpy" -version = "1.8.1" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741"}, - {file = "debugpy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e"}, - {file = "debugpy-1.8.1-cp310-cp310-win32.whl", hash = "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0"}, - {file = "debugpy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd"}, - {file = "debugpy-1.8.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb"}, - {file = "debugpy-1.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099"}, - {file = 
"debugpy-1.8.1-cp311-cp311-win32.whl", hash = "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146"}, - {file = "debugpy-1.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8"}, - {file = "debugpy-1.8.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539"}, - {file = "debugpy-1.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace"}, - {file = "debugpy-1.8.1-cp312-cp312-win32.whl", hash = "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0"}, - {file = "debugpy-1.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98"}, - {file = "debugpy-1.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39"}, - {file = "debugpy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7"}, - {file = "debugpy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9"}, - {file = "debugpy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234"}, - {file = "debugpy-1.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42"}, - {file = "debugpy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703"}, - {file = "debugpy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23"}, - {file = "debugpy-1.8.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3"}, - {file = "debugpy-1.8.1-py2.py3-none-any.whl", hash = "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242"}, - {file = "debugpy-1.8.1.zip", hash = "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.1" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "executing" -version = "2.0.1" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.5" -files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = 
"sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "fastjsonschema" -version = "2.19.1" -description = "Fastest Python implementation of JSON schema" -optional = false -python-versions = "*" -files = [ - {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, - {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, -] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "fiona" -version = "1.10.0" -description = "Fiona reads and writes spatial data files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fiona-1.10.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2f8c6e7d2e5e2dc3b72b520cd160ca6d3689748b20d35b518eec9073f5a3c5bf"}, - {file = "fiona-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b2a42fb0a7168003015ab9648712f7d7eb971237af3c9be6000df69cd4b97ba7"}, - {file = "fiona-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d95a93d6d37e04232b141d9f08190b30e7dc368126a34342d764b9d0a857d770"}, - {file = "fiona-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:382cffc905d44a933951857ae07f79cf59567db08f4201ed2a28f9f9bcc86932"}, - {file = "fiona-1.10.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:2854fca6478ca3c1dc3f06531025550046f405488909a5be11ea97e4c55894db"}, - {file = "fiona-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef27d75046e57b3e55c4b5d4e1bf7e43ef490e73ada5e86f9b148aa23a2e7fd4"}, - {file = 
"fiona-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16072f9313f60220310951561c5df29122d078d26316455dc58516c330881d35"}, - {file = "fiona-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a7f28c5667d5cb05891fd00ed8ca2d3067bb58c90ca928a2741c0da74daf10d"}, - {file = "fiona-1.10.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:d099b1b5ab99db1d144ac47f657e14471178537d878ebd016c37afdf37303399"}, - {file = "fiona-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ad1ea80555bc56047d4b4e60c9424cf66875f161b3e56ebd60fa0574dc6693f"}, - {file = "fiona-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a273cc957e86289609b47155f75d60c2cc37974a92618e016f75bc5ab9cc2dec"}, - {file = "fiona-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:ef44b3b0704244503a5f12ae92ca88b2cf4fedc9fd881b00c7c58b6899ca5133"}, - {file = "fiona-1.10.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:80929d220a92b50592b5ce96233d631c61a3c9b9bebd26ea136e7e48ffb50616"}, - {file = "fiona-1.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82514e8cc113245f2c7deda1c66f0aba67b183e20404365bbb7347e12b35c80f"}, - {file = "fiona-1.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1a555b2546d3c69d0826a526a5c3e50a1cd330c95e2e0ebf6e82fa81fc2d96"}, - {file = "fiona-1.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:bf11de868ab601b8d917dc38719b39bf8bdeaadc4fda9964d96b4b354e528fff"}, - {file = "fiona-1.10.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:c0e72c4ed6ce7e2b2531a62844f3216528c149b55b89ff2881bcfe34d3b37300"}, - {file = "fiona-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:065328fcc61caeca7c5d4b0b1b741213e422997ea66eaa04c2e207e1609eb0e7"}, - {file = "fiona-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34575acb68b41009b28237121c728015aebb2ab3969cf46e57d9b9e5510ece61"}, - {file = "fiona-1.10.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:bfc9b76345a58ddcf9aae8697eb444728ff7a2cde7d5925f3031ff25e9eaa339"}, - {file = "fiona-1.10.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:f335beb927c5e9d9997a9cf665d6eee2e24191a02582f01786afa76119b71110"}, - {file = "fiona-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5f1db6fc3f9212fdc817628981731e0adcdce88b0afd30531986c732a75f6a98"}, - {file = "fiona-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc68efb479a04be9a8e9c381940493c22a47f6aec5bbec22d0bb57a9cb240296"}, - {file = "fiona-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:d99d2faa8cb3590dc0018a2832af39e1930b39cfff53017f3681d505bf813fee"}, - {file = "fiona-1.10.0.tar.gz", hash = "sha256:3529fd46d269ff3f70aeb9316a93ae95cf2f87d7e148a8ff0d68532bf81ff7ae"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -certifi = "*" -click = ">=8.0,<9.0" -click-plugins = ">=1.0" -cligj = ">=0.5" -importlib-metadata = {version = "*", markers = "python_version < \"3.10\""} - -[package.extras] -all = ["fiona[calc,s3,test]"] -calc = ["pyparsing", "shapely"] -s3 = ["boto3 (>=1.3.1)"] -test = ["aiohttp", "fiona[s3]", "fsspec", "pytest (>=7)", "pytest-cov", "pytz"] - -[[package]] -name = "fonttools" -version = "4.51.0" -description = "Tools to manipulate font files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, - {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, - {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, - {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, - {file = 
"fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, - {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, - {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, - {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, - {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, - {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, - {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, - {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, - {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, - {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, - {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, - {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, - {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, - {file = 
"fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, - {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, - {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, - {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, - {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, - {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, - {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, - {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, - {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, - {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, - {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, - {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, - {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, - {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, - {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, - {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, - {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, - {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, - {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, - {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, - {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, - {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, - {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, - {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, - {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, -] - -[package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops 
(>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] -graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres", "pycairo", "scipy"] -lxml = ["lxml (>=4.0)"] -pathops = ["skia-pathops (>=0.5.0)"] -plot = ["matplotlib"] -repacker = ["uharfbuzz (>=0.23.0)"] -symfont = ["sympy"] -type1 = ["xattr"] -ufo = ["fs (>=2.2.0,<3)"] -unicode = ["unicodedata2 (>=15.1.0)"] -woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] - -[[package]] -name = "fqdn" -version = "1.5.1" -description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -optional = false -python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -files = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] - -[[package]] -name = "geopandas" -version = "0.14.4" -description = "Geographic pandas extensions" -optional = false -python-versions = ">=3.9" -files = [ - {file = "geopandas-0.14.4-py3-none-any.whl", hash = "sha256:3bb6473cb59d51e1a7fe2dbc24a1a063fb0ebdeddf3ce08ddbf8c7ddc99689aa"}, - {file = "geopandas-0.14.4.tar.gz", hash = "sha256:56765be9d58e2c743078085db3bd07dc6be7719f0dbe1dfdc1d705cb80be7c25"}, -] - -[package.dependencies] -fiona = ">=1.8.21" -numpy = ">=1.22" -packaging = "*" -pandas = ">=1.4.0" -pyproj = ">=3.3.0" -shapely = ">=1.8.0" - -[[package]] -name = "griffe" -version = "0.45.1" -description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "griffe-0.45.1-py3-none-any.whl", hash = "sha256:12194c10ae07a7f46708741ad78419362cf8e5c883f449c7c48de1686611b853"}, - {file = "griffe-0.45.1.tar.gz", hash = "sha256:84ce9243a9e63c07d55563a735a0d07ef70b46c455616c174010e7fc816f4648"}, -] - -[package.dependencies] -colorama = ">=0.4" - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "importlib-metadata" -version = "7.1.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "importlib-resources" -version = "6.4.0" -description = "Read resources from Python 
packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "ipykernel" -version = "6.29.4" -description = "IPython Kernel for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.29.4-py3-none-any.whl", hash = "sha256:1181e653d95c6808039c509ef8e67c4126b3b3af7781496c7cbfb5ed938a27da"}, - {file = "ipykernel-6.29.4.tar.gz", hash = "sha256:3d44070060f9475ac2092b760123fadf105d2e2493c24848b6691a7c4f42af5c"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = 
["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.18.1" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.9" -files = [ - {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, - {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} - -[package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] 
-nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] - -[[package]] -name = "ipywidgets" -version = "8.1.2" -description = "Jupyter interactive widgets" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ipywidgets-8.1.2-py3-none-any.whl", hash = "sha256:bbe43850d79fb5e906b14801d6c01402857996864d1e5b6fa62dd2ee35559f60"}, - {file = "ipywidgets-8.1.2.tar.gz", hash = "sha256:d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9"}, -] - -[package.dependencies] -comm = ">=0.1.3" -ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.10,<3.1.0" -traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.10,<4.1.0" - -[package.extras] -test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] - -[[package]] -name = "isoduration" -version = "20.11.0" -description = "Operations with ISO 8601 durations" -optional = false -python-versions = ">=3.7" -files = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] - -[package.dependencies] -arrow = ">=0.15.0" - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - -[[package]] -name = "json5" -version = "0.9.25" -description = "A Python implementation of the JSON5 data format." -optional = false -python-versions = ">=3.8" -files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, -] - -[[package]] -name = "jsonpointer" -version = "2.4" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, -] - -[[package]] -name = "jsonschema" -version = "4.22.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.22.0-py3-none-any.whl", hash = 
"sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, - {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} -rpds-py = ">=0.7.1" -uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - -[[package]] -name = "jupyter" -version = "1.0.0" -description = 
"Jupyter metapackage. Install all the Jupyter components in one go." -optional = false -python-versions = "*" -files = [ - {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, - {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, - {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, -] - -[package.dependencies] -ipykernel = "*" -ipywidgets = "*" -jupyter-console = "*" -nbconvert = "*" -notebook = "*" -qtconsole = "*" - -[[package]] -name = "jupyter-client" -version = "8.6.1" -description = "Jupyter protocol implementation and client libraries" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_client-8.6.1-py3-none-any.whl", hash = "sha256:3b7bd22f058434e3b9a7ea4b1500ed47de2713872288c0d511d19926f99b459f"}, - {file = "jupyter_client-8.6.1.tar.gz", hash = "sha256:e842515e2bab8e19186d89fdfea7abd15e39dd581f94e399f00e2af5a1652d3f"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = ">=5.3" - -[package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-console" -version = "6.6.3" -description = "Jupyter terminal console" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, - {file = "jupyter_console-6.6.3.tar.gz", hash = 
"sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, -] - -[package.dependencies] -ipykernel = ">=6.14" -ipython = "*" -jupyter-client = ">=7.0.0" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -prompt-toolkit = ">=3.0.30" -pygments = "*" -pyzmq = ">=17" -traitlets = ">=5.4" - -[package.extras] -test = ["flaky", "pexpect", "pytest"] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -description = "Jupyter core package. A base package on which Jupyter projects rely." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-events" -version = "0.10.0" -description = "Jupyter Event System library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, - {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, -] - -[package.dependencies] -jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} -python-json-logger = ">=2.0.4" -pyyaml = ">=5.3" -referencing = "*" -rfc3339-validator = "*" -rfc3986-validator = ">=0.1.1" -traitlets = ">=5.3" - -[package.extras] -cli = ["click", "rich"] -docs = ["jupyterlite-sphinx", 
"myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] - -[[package]] -name = "jupyter-lsp" -version = "2.2.5" -description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, - {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-server = ">=1.1.2" - -[[package]] -name = "jupyter-server" -version = "2.14.0" -description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server-2.14.0-py3-none-any.whl", hash = "sha256:fb6be52c713e80e004fac34b35a0990d6d36ba06fd0a2b2ed82b899143a64210"}, - {file = "jupyter_server-2.14.0.tar.gz", hash = "sha256:659154cea512083434fd7c93b7fe0897af7a2fd0b9dd4749282b42eaac4ae677"}, -] - -[package.dependencies] -anyio = ">=3.1.0" -argon2-cffi = ">=21.1" -jinja2 = ">=3.0.3" -jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -jupyter-events = ">=0.9.0" -jupyter-server-terminals = ">=0.4.4" -nbconvert = ">=6.4.4" -nbformat = ">=5.3.0" -overrides = ">=5.0" -packaging = ">=22.0" -prometheus-client = ">=0.9" -pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} -pyzmq = ">=24" -send2trash = ">=1.8.2" -terminado = ">=0.8.3" -tornado = ">=6.2.0" -traitlets = ">=5.6.0" -websocket-client = ">=1.7" - -[package.extras] -docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", 
"sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] - -[[package]] -name = "jupyter-server-terminals" -version = "0.5.3" -description = "A Jupyter Server Extension Providing Terminals." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, - {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, -] - -[package.dependencies] -pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} -terminado = ">=0.8.3" - -[package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] - -[[package]] -name = "jupyterlab" -version = "4.2.5" -description = "JupyterLab computational environment" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321"}, - {file = "jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75"}, -] - -[package.dependencies] -async-lru = ">=1.0.0" -httpx = ">=0.25.0" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -ipykernel = ">=6.5.0" -jinja2 = ">=3.0.3" -jupyter-core = "*" -jupyter-lsp = ">=2.0.0" -jupyter-server = ">=2.4.0,<3" -jupyterlab-server = ">=2.27.1,<3" -notebook-shim = ">=0.2" -packaging = "*" 
-setuptools = ">=40.1.0" -tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} -tornado = ">=6.2.0" -traitlets = "*" - -[package.extras] -dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.3.5)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] -docs-screenshots = ["altair (==5.3.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.2)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.1.post2)", "matplotlib (==3.8.3)", "nbconvert (>=7.0.0)", "pandas (==2.2.1)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] -test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] -upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -description = "Pygments theme using JupyterLab CSS variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, - {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, -] - -[[package]] -name = "jupyterlab-server" -version = "2.27.1" -description = "A set of server components for JupyterLab and JupyterLab like applications." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_server-2.27.1-py3-none-any.whl", hash = "sha256:f5e26156e5258b24d532c84e7c74cc212e203bff93eb856f81c24c16daeecc75"}, - {file = "jupyterlab_server-2.27.1.tar.gz", hash = "sha256:097b5ac709b676c7284ac9c5e373f11930a561f52cd5a86e4fc7e5a9c8a8631d"}, -] - -[package.dependencies] -babel = ">=2.10" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0.3" -json5 = ">=0.9.0" -jsonschema = ">=4.18.0" -jupyter-server = ">=1.21,<3" -packaging = ">=21.3" -requests = ">=2.31" - -[package.extras] -docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] -openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] - -[[package]] -name = "jupyterlab-widgets" -version = "3.0.10" -description = "Jupyter interactive widgets for JupyterLab" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyterlab_widgets-3.0.10-py3-none-any.whl", hash = "sha256:dd61f3ae7a5a7f80299e14585ce6cf3d6925a96c9103c978eda293197730cb64"}, - {file = "jupyterlab_widgets-3.0.10.tar.gz", hash = "sha256:04f2ac04976727e4f9d0fa91cdc2f1ab860f965e504c29dbd6a65c882c9d04c0"}, -] - -[[package]] -name = "kiwisolver" -version = "1.4.5" -description = "A fast implementation of the Cassowary constraint solver" -optional = false -python-versions = ">=3.7" -files = [ - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, - {file = 
"kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, - 
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, - {file = 
"kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, - {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "matplotlib" -version = "3.9.0" -description = "Python plotting package" -optional = false -python-versions = ">=3.9" -files = [ - {file = "matplotlib-3.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2bcee1dffaf60fe7656183ac2190bd630842ff87b3153afb3e384d966b57fe56"}, - {file = "matplotlib-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:3f988bafb0fa39d1074ddd5bacd958c853e11def40800c5824556eb630f94d3b"}, - {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe428e191ea016bb278758c8ee82a8129c51d81d8c4bc0846c09e7e8e9057241"}, - {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf3978060a106fab40c328778b148f590e27f6fa3cd15a19d6892575bce387d"}, - {file = "matplotlib-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e7f03e5cbbfacdd48c8ea394d365d91ee8f3cae7e6ec611409927b5ed997ee4"}, - {file = "matplotlib-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:13beb4840317d45ffd4183a778685e215939be7b08616f431c7795276e067463"}, - {file = "matplotlib-3.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:063af8587fceeac13b0936c42a2b6c732c2ab1c98d38abc3337e430e1ff75e38"}, - {file = "matplotlib-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a2fa6d899e17ddca6d6526cf6e7ba677738bf2a6a9590d702c277204a7c6152"}, - {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550cdda3adbd596078cca7d13ed50b77879104e2e46392dcd7c75259d8f00e85"}, - {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cce0f31b351e3551d1f3779420cf8f6ec0d4a8cf9c0237a3b549fd28eb4abb"}, - {file = "matplotlib-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c53aeb514ccbbcbab55a27f912d79ea30ab21ee0531ee2c09f13800efb272674"}, - {file = "matplotlib-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5be985db2596d761cdf0c2eaf52396f26e6a64ab46bd8cd810c48972349d1be"}, - {file = "matplotlib-3.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c79f3a585f1368da6049318bdf1f85568d8d04b2e89fc24b7e02cc9b62017382"}, - {file = "matplotlib-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bdd1ecbe268eb3e7653e04f451635f0fb0f77f07fd070242b44c076c9106da84"}, - {file = 
"matplotlib-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e85a1a6d732f645f1403ce5e6727fd9418cd4574521d5803d3d94911038e5"}, - {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a490715b3b9984fa609116481b22178348c1a220a4499cda79132000a79b4db"}, - {file = "matplotlib-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8146ce83cbc5dc71c223a74a1996d446cd35cfb6a04b683e1446b7e6c73603b7"}, - {file = "matplotlib-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:d91a4ffc587bacf5c4ce4ecfe4bcd23a4b675e76315f2866e588686cc97fccdf"}, - {file = "matplotlib-3.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:616fabf4981a3b3c5a15cd95eba359c8489c4e20e03717aea42866d8d0465956"}, - {file = "matplotlib-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd53c79fd02f1c1808d2cfc87dd3cf4dbc63c5244a58ee7944497107469c8d8a"}, - {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06a478f0d67636554fa78558cfbcd7b9dba85b51f5c3b5a0c9be49010cf5f321"}, - {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c40af649d19c85f8073e25e5806926986806fa6d54be506fbf02aef47d5a89"}, - {file = "matplotlib-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52146fc3bd7813cc784562cb93a15788be0b2875c4655e2cc6ea646bfa30344b"}, - {file = "matplotlib-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:0fc51eaa5262553868461c083d9adadb11a6017315f3a757fc45ec6ec5f02888"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bd4f2831168afac55b881db82a7730992aa41c4f007f1913465fb182d6fb20c0"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:290d304e59be2b33ef5c2d768d0237f5bd132986bdcc66f80bc9bcc300066a03"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7ff2e239c26be4f24bfa45860c20ffccd118d270c5b5d081fa4ea409b5469fcd"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:af4001b7cae70f7eaacfb063db605280058246de590fa7874f00f62259f2df7e"}, - {file = "matplotlib-3.9.0.tar.gz", hash = "sha256:e6d29ea6c19e34b30fb7d88b7081f869a03014f66fe06d62cc77d5a6ea88ed7a"}, -] - -[package.dependencies] -contourpy = ">=1.0.1" -cycler = ">=0.10" -fonttools = ">=4.22.0" -importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} -kiwisolver = ">=1.3.1" -numpy = ">=1.23" -packaging = ">=20.0" -pillow = ">=8" -pyparsing = ">=2.3.1" -python-dateutil = ">=2.7" - -[package.extras] -dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "mistune" -version = "3.0.2" -description = "A sane and fast Markdown parser with useful plugins and renderers" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = 
"mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nbclient" -version = "0.10.0" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.16.4" -description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, - {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "!=5.0.0" -defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<4" -nbclient = ">=0.5.0" -nbformat = ">=5.7" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.1" - -[package.extras] -all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["pyqtwebengine (>=5.15)"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] -webpdf = ["playwright"] - -[[package]] -name = "nbformat" -version = "5.10.4" -description = "The Jupyter Notebook format" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, - {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, -] - -[package.dependencies] -fastjsonschema = ">=2.15" -jsonschema = ">=2.6" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -traitlets = ">=5.1" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", 
"sphinxcontrib-spelling"] -test = ["pep440", "pre-commit", "pytest", "testpath"] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "notebook" -version = "7.2.2" -description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -optional = false -python-versions = ">=3.8" -files = [ - {file = "notebook-7.2.2-py3-none-any.whl", hash = "sha256:c89264081f671bc02eec0ed470a627ed791b9156cad9285226b31611d3e9fe1c"}, - {file = "notebook-7.2.2.tar.gz", hash = "sha256:2ef07d4220421623ad3fe88118d687bc0450055570cdd160814a59cf3a1c516e"}, -] - -[package.dependencies] -jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.2.0,<4.3" -jupyterlab-server = ">=2.27.1,<3" -notebook-shim = ">=0.2,<0.3" -tornado = ">=6.2.0" - -[package.extras] -dev = ["hatch", "pre-commit"] -docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] - -[[package]] -name = "notebook-shim" -version = "0.2.4" -description = "A shim layer for notebook traits and config" -optional = false -python-versions = ">=3.7" -files = [ - {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, - {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, -] - 
-[package.dependencies] -jupyter-server = ">=1.8,<3" - -[package.extras] -test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = 
"numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - -[[package]] -name = "packaging" -version = "24.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, -] - -[[package]] -name = "pandas" -version = "2.2.2" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = 
"pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = 
"pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", 
"numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandocfilters" -version = "1.5.1" -description = "Utilities for writing pandoc filters in python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = 
"pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, -] - -[[package]] -name = "parso" -version = "0.8.4" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["docopt", "pytest"] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." 
-optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pillow" -version = "10.3.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, - {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, - {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, - {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, - 
{file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, - {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, - {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, - {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, - {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, - {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, - {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, - {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, - {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, - {file = 
"pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, - {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, - {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, - {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, - {file = 
"pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, - {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions"] -xmp = ["defusedxml"] - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "plum-dispatch" -version = "1.7.4" -description = "Multiple dispatch in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "plum-dispatch-1.7.4.tar.gz", hash = "sha256:1c1d15b2842b5fa98405fd3dff6fad4887bdc77b60bd200e209d76ebfe9990fe"}, - {file = "plum_dispatch-1.7.4-py3-none-any.whl", hash = "sha256:c40dbeab269bbbf972ce0dbc078380da19ebaee1a370a2c564e1814a11bde216"}, -] - -[[package]] -name = "plum-dispatch" -version = "2.3.6" -description = "Multiple dispatch in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "plum_dispatch-2.3.6-py3-none-any.whl", hash = "sha256:ad0e6f1aa40f6b0bd286e0c24bc5928545095e61dc7b07d19455a7f3b064c907"}, - {file = "plum_dispatch-2.3.6.tar.gz", hash = "sha256:664e79003076aac754127fe9d667f43cf22e448a179d46ddfe298f18c012551d"}, -] - -[package.dependencies] -beartype = ">=0.16.2" -rich = ">=10.0" 
-typing-extensions = {version = "*", markers = "python_version <= \"3.10\""} - -[package.extras] -dev = ["black (==23.9.0)", "build", "coveralls", "ghp-import", "ipython", "jupyter-book", "mypy", "numpy", "pre-commit", "pyright (>=1.1.331)", "pytest (>=6)", "pytest-cov", "ruff (==0.1.0)", "sybil", "tox", "wheel"] - -[[package]] -name = "prometheus-client" -version = "0.20.0" -description = "Python client for the Prometheus monitoring system." -optional = false -python-versions = ">=3.8" -files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.43" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "psutil" -version = "5.9.8" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = 
"sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.2" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydantic" -version = "2.7.1" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = 
"pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, - {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.18.2" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.18.2" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, - {file = 
"pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, - {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, - {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, - {file = 
"pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, - {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, - {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, - {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, - {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, - {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, - {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, - {file = 
"pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, - {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, - {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash 
= "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, - {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, - {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, - 
{file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, - {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyparsing" -version = "3.1.2" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pyproj" -version = "3.6.1" -description = "Python interface to PROJ (cartographic projections and coordinate transformations library)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pyproj-3.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab7aa4d9ff3c3acf60d4b285ccec134167a948df02347585fdd934ebad8811b4"}, - {file = "pyproj-3.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4bc0472302919e59114aa140fd7213c2370d848a7249d09704f10f5b062031fe"}, - {file = "pyproj-3.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5279586013b8d6582e22b6f9e30c49796966770389a9d5b85e25a4223286cd3f"}, - {file = "pyproj-3.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fafd1f3eb421694857f254a9bdbacd1eb22fc6c24ca74b136679f376f97d35"}, - {file = "pyproj-3.6.1-cp310-cp310-win32.whl", hash = "sha256:c41e80ddee130450dcb8829af7118f1ab69eaf8169c4bf0ee8d52b72f098dc2f"}, - {file = "pyproj-3.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:db3aedd458e7f7f21d8176f0a1d924f1ae06d725228302b872885a1c34f3119e"}, - 
{file = "pyproj-3.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ebfbdbd0936e178091309f6cd4fcb4decd9eab12aa513cdd9add89efa3ec2882"}, - {file = "pyproj-3.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:447db19c7efad70ff161e5e46a54ab9cc2399acebb656b6ccf63e4bc4a04b97a"}, - {file = "pyproj-3.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7e13c40183884ec7f94eb8e0f622f08f1d5716150b8d7a134de48c6110fee85"}, - {file = "pyproj-3.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65ad699e0c830e2b8565afe42bd58cc972b47d829b2e0e48ad9638386d994915"}, - {file = "pyproj-3.6.1-cp311-cp311-win32.whl", hash = "sha256:8b8acc31fb8702c54625f4d5a2a6543557bec3c28a0ef638778b7ab1d1772132"}, - {file = "pyproj-3.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:38a3361941eb72b82bd9a18f60c78b0df8408416f9340521df442cebfc4306e2"}, - {file = "pyproj-3.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1e9fbaf920f0f9b4ee62aab832be3ae3968f33f24e2e3f7fbb8c6728ef1d9746"}, - {file = "pyproj-3.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d227a865356f225591b6732430b1d1781e946893789a609bb34f59d09b8b0f8"}, - {file = "pyproj-3.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83039e5ae04e5afc974f7d25ee0870a80a6bd6b7957c3aca5613ccbe0d3e72bf"}, - {file = "pyproj-3.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb059ba3bced6f6725961ba758649261d85ed6ce670d3e3b0a26e81cf1aa8d"}, - {file = "pyproj-3.6.1-cp312-cp312-win32.whl", hash = "sha256:2d6ff73cc6dbbce3766b6c0bce70ce070193105d8de17aa2470009463682a8eb"}, - {file = "pyproj-3.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:7a27151ddad8e1439ba70c9b4b2b617b290c39395fa9ddb7411ebb0eb86d6fb0"}, - {file = "pyproj-3.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ba1f9b03d04d8cab24d6375609070580a26ce76eaed54631f03bab00a9c737b"}, - {file = "pyproj-3.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:18faa54a3ca475bfe6255156f2f2874e9a1c8917b0004eee9f664b86ccc513d3"}, - {file = "pyproj-3.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd43bd9a9b9239805f406fd82ba6b106bf4838d9ef37c167d3ed70383943ade1"}, - {file = "pyproj-3.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50100b2726a3ca946906cbaa789dd0749f213abf0cbb877e6de72ca7aa50e1ae"}, - {file = "pyproj-3.6.1-cp39-cp39-win32.whl", hash = "sha256:9274880263256f6292ff644ca92c46d96aa7e57a75c6df3f11d636ce845a1877"}, - {file = "pyproj-3.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:36b64c2cb6ea1cc091f329c5bd34f9c01bb5da8c8e4492c709bda6a09f96808f"}, - {file = "pyproj-3.6.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd93c1a0c6c4aedc77c0fe275a9f2aba4d59b8acf88cebfc19fe3c430cfabf4f"}, - {file = "pyproj-3.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6420ea8e7d2a88cb148b124429fba8cd2e0fae700a2d96eab7083c0928a85110"}, - {file = "pyproj-3.6.1.tar.gz", hash = "sha256:44aa7c704c2b7d8fb3d483bbf75af6cb2350d30a63b144279a09b75fead501bf"}, -] - -[package.dependencies] -certifi = "*" - -[[package]] -name = "pytest" -version = "8.2.1" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"}, - {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2.0" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", 
"setuptools", "xmlschema"] - -[[package]] -name = "pytest-cov" -version = "5.0.0" -description = "Pytest plugin for measuring coverage." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, - {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-json-logger" -version = "2.0.7" -description = "A python library adding a json log formatter" -optional = false -python-versions = ">=3.6" -files = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = 
"sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = 
"sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pywinpty" -version = "2.0.13" -description = "Pseudo terminal support for Windows from Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, - {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, - {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, - {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, - {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, - {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = 
"PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = 
"PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "pyzmq" -version = "26.0.3" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, - {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, - {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, 
- {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, - {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = 
"sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, - {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, - {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, - {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, - {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "qtconsole" -version = "5.5.2" -description = "Jupyter Qt console" -optional = false -python-versions = ">=3.8" -files = [ - {file = "qtconsole-5.5.2-py3-none-any.whl", hash = "sha256:42d745f3d05d36240244a04e1e1ec2a86d5d9b6edb16dbdef582ccb629e87e0b"}, - {file = "qtconsole-5.5.2.tar.gz", hash = "sha256:6b5fb11274b297463706af84dcbbd5c92273b1f619e6d25d08874b0a88516989"}, -] - -[package.dependencies] -ipykernel = ">=4.1" -jupyter-client = ">=4.1" -jupyter-core = "*" -packaging = "*" -pygments = "*" -pyzmq = ">=17.1" -qtpy = ">=2.4.0" -traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" - -[package.extras] -doc = ["Sphinx (>=1.3)"] -test = ["flaky", "pytest", "pytest-qt"] - -[[package]] -name = "qtpy" -version = "2.4.1" -description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "QtPy-2.4.1-py3-none-any.whl", hash = "sha256:1c1d8c4fa2c884ae742b069151b0abe15b3f70491f3972698c683b8e38de839b"}, - {file = "QtPy-2.4.1.tar.gz", hash = "sha256:a5a15ffd519550a1361bdc56ffc07fda56a6af7292f17c7b395d4083af632987"}, -] - -[package.dependencies] -packaging = "*" - -[package.extras] -test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] - -[[package]] -name = "quartodoc" -version = "0.7.2" -description = "Generate API documentation with Quarto." -optional = false -python-versions = ">=3.9" -files = [ - {file = "quartodoc-0.7.2-py3-none-any.whl", hash = "sha256:adff974296c013308f4e06cadea8b71b90d334dcb1510d2bd0ef9cea06d8be98"}, - {file = "quartodoc-0.7.2.tar.gz", hash = "sha256:58f98c88065ce0b4a857f13282d1e38bb2cd4757b481a4c6279bdca89ba5640b"}, -] - -[package.dependencies] -click = "*" -griffe = ">=0.33" -importlib-metadata = ">=5.1.0" -importlib-resources = ">=5.10.2" -plum-dispatch = [ - {version = "<2.0.0", markers = "python_version < \"3.10\""}, - {version = ">2.0.0", markers = "python_version >= \"3.10\""}, -] -pydantic = "*" -pyyaml = "*" -sphobjinv = ">=2.3.1" -tabulate = ">=0.9.0" -typing-extensions = ">=4.4.0" -watchdog = ">=3.0.0" - -[package.extras] -dev = ["jupyterlab", "jupytext", "pre-commit", "pytest", "syrupy"] - -[[package]] -name = "rasterio" -version = "1.3.10" -description = "Fast and direct raster I/O for use with Numpy and SciPy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rasterio-1.3.10-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2ef27c3eff6f44f8b5d5de228003367c1843593edf648d85c0dc1319c00dc57d"}, - {file = "rasterio-1.3.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c711b497e9ef0c4f5e1c01e34ba910708e066e1c4a69c25df18d1bcc04481287"}, - {file = "rasterio-1.3.10-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:d1ac85857144cb8075e332e9d908b65426d30ddc1f59f7a04bcf6ed6fd3c0d47"}, - {file = 
"rasterio-1.3.10-cp310-cp310-win_amd64.whl", hash = "sha256:ef8a496740df1e68f7a3d3449aa3be9c3210c22f4bb78a4a9e1c290183abd9b1"}, - {file = "rasterio-1.3.10-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:97d867cada29f16cb83f1743217f775f8b982676fcdda77671d25abb26698159"}, - {file = "rasterio-1.3.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:505b3e659eb3b137192c25233bf7954bc4997b1a474bae9e129fbd5ac2619404"}, - {file = "rasterio-1.3.10-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:30f27e309a14a70c821d10a0ea18b110968dc2e2186b06a900aebd92094f4e00"}, - {file = "rasterio-1.3.10-cp311-cp311-win_amd64.whl", hash = "sha256:cbb2eea127328302f9e3158a000363a7d9eea22537378dee4f824a7fa2d78c05"}, - {file = "rasterio-1.3.10-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:3a9c4fb63e050e11bcd23e53f084ca186b445f976df1f70e7abd851c4072837f"}, - {file = "rasterio-1.3.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c7ddca79444fd3b933f4cd1a1773e9f7839d0ce5d76e600bdf92ee9a79b95f8"}, - {file = "rasterio-1.3.10-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:f9cd757e11cfb07ef39b1cc79a32497bf22aff7fec41fe330b868cb3043b4db5"}, - {file = "rasterio-1.3.10-cp312-cp312-win_amd64.whl", hash = "sha256:7e653968f64840654d277e0f86f8666ed8f3030ba36fa865f420f9bc38d619ee"}, - {file = "rasterio-1.3.10-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7a22c0e0cf07dbed6576faf9a49bc4afa1afedd5a14441b64a3d3dd6d10dc274"}, - {file = "rasterio-1.3.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d29d30c2271fa265913bd3db93fa213d3a0894362ec704e7273cf30443098a90"}, - {file = "rasterio-1.3.10-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:287e8d0d0472c778aa0b6392e9c00894a80f2bace28fa6eddb76c0a895097947"}, - {file = "rasterio-1.3.10-cp38-cp38-win_amd64.whl", hash = "sha256:a420e5f25108b1c92c5d071cfd6518b3766f20a6eddb1b322d06c3d46a89fab6"}, - {file = "rasterio-1.3.10-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:73ea4d0e584f696ef115601bbb97ba8d2b68a67c2bb3b40999414d31b6c7cf89"}, - {file = "rasterio-1.3.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6eece6420d7d6ef9b9830633b8fcd15e86b8702cb13419abe251c16ca502cf3"}, - {file = "rasterio-1.3.10-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:0bbd62b45a35cab53cb7fe72419e823e47ab31ee2d055af8e21dc7f37fe5ed6c"}, - {file = "rasterio-1.3.10-cp39-cp39-win_amd64.whl", hash = "sha256:450f2bd45335308829da90566fbcbdb8e8aa0251a9d1f6ebb60667855dfb7554"}, - {file = "rasterio-1.3.10.tar.gz", hash = "sha256:ce182c735b4f9e8735d90600607ecab15ef895eb8aa660bf665751529477e326"}, -] - -[package.dependencies] -affine = "*" -attrs = "*" -certifi = "*" -click = ">=4.0" -click-plugins = "*" -cligj = ">=0.5" -importlib-metadata = {version = "*", markers = "python_version < \"3.10\""} -numpy = "*" -setuptools = "*" -snuggs = ">=1.4.1" - -[package.extras] -all = ["boto3 (>=1.2.4)", "ghp-import", "hypothesis", "ipython (>=2.0)", "matplotlib", "numpydoc", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely", "sphinx", "sphinx-rtd-theme"] -docs = ["ghp-import", "numpydoc", "sphinx", "sphinx-rtd-theme"] -ipython = ["ipython (>=2.0)"] -plot = ["matplotlib"] -s3 = ["boto3 (>=1.2.4)"] -test = ["boto3 (>=1.2.4)", "hypothesis", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely"] - -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "requests" -version = "2.32.0" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.0-py3-none-any.whl", hash = "sha256:f2c3881dddb70d056c5bd7600a4fae312b2a300e39be6a118d30b90bd27262b5"}, - {file = "requests-2.32.0.tar.gz", hash = "sha256:fa5490319474c82ef1d2c9bc459d3652e3ae4ef4c4ebdd18a21145a47ca4b6b8"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] - -[[package]] -name = "rich" -version = "13.7.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = 
"sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rpds-py" -version = "0.18.1" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"}, - {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"}, - {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"}, - {file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"}, - {file = 
"rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"}, - {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"}, - {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"}, - {file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"}, - {file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"}, - {file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"}, - {file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"}, - {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"}, - {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"}, - 
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"}, - {file = 
"rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"}, - {file = 
"rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"}, - {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"}, -] - -[[package]] -name = "scikit-learn" -version = "1.5.0" -description = "A set of python modules for machine learning and data mining" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scikit_learn-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12e40ac48555e6b551f0a0a5743cc94cc5a765c9513fe708e01f0aa001da2801"}, - {file = "scikit_learn-1.5.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f405c4dae288f5f6553b10c4ac9ea7754d5180ec11e296464adb5d6ac68b6ef5"}, - {file = "scikit_learn-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df8ccabbf583315f13160a4bb06037bde99ea7d8211a69787a6b7c5d4ebb6fc3"}, - {file = "scikit_learn-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c75ea812cd83b1385bbfa94ae971f0d80adb338a9523f6bbcb5e0b0381151d4"}, - {file = "scikit_learn-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:a90c5da84829a0b9b4bf00daf62754b2be741e66b5946911f5bdfaa869fcedd6"}, - {file = "scikit_learn-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a65af2d8a6cce4e163a7951a4cfbfa7fceb2d5c013a4b593686c7f16445cf9d"}, - {file = "scikit_learn-1.5.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:4c0c56c3005f2ec1db3787aeaabefa96256580678cec783986836fc64f8ff622"}, - {file = "scikit_learn-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f77547165c00625551e5c250cefa3f03f2fc92c5e18668abd90bfc4be2e0bff"}, - {file = "scikit_learn-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:118a8d229a41158c9f90093e46b3737120a165181a1b58c03461447aa4657415"}, - {file = "scikit_learn-1.5.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:a03b09f9f7f09ffe8c5efffe2e9de1196c696d811be6798ad5eddf323c6f4d40"}, - {file = "scikit_learn-1.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:460806030c666addee1f074788b3978329a5bfdc9b7d63e7aad3f6d45c67a210"}, - {file = "scikit_learn-1.5.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:1b94d6440603752b27842eda97f6395f570941857456c606eb1d638efdb38184"}, - {file = "scikit_learn-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d82c2e573f0f2f2f0be897e7a31fcf4e73869247738ab8c3ce7245549af58ab8"}, - {file = "scikit_learn-1.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3a10e1d9e834e84d05e468ec501a356226338778769317ee0b84043c0d8fb06"}, - {file = "scikit_learn-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:855fc5fa8ed9e4f08291203af3d3e5fbdc4737bd617a371559aaa2088166046e"}, - {file = "scikit_learn-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:40fb7d4a9a2db07e6e0cae4dc7bdbb8fada17043bac24104d8165e10e4cff1a2"}, - {file = "scikit_learn-1.5.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:47132440050b1c5beb95f8ba0b2402bbd9057ce96ec0ba86f2f445dd4f34df67"}, - {file = "scikit_learn-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174beb56e3e881c90424e21f576fa69c4ffcf5174632a79ab4461c4c960315ac"}, - {file = "scikit_learn-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261fe334ca48f09ed64b8fae13f9b46cc43ac5f580c4a605cbb0a517456c8f71"}, - {file = "scikit_learn-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:057b991ac64b3e75c9c04b5f9395eaf19a6179244c089afdebaad98264bff37c"}, - {file = "scikit_learn-1.5.0.tar.gz", hash = "sha256:789e3db01c750ed6d496fa2db7d50637857b451e57bcae863bff707c1247bef7"}, -] - -[package.dependencies] -joblib = ">=1.2.0" -numpy = ">=1.19.5" -scipy = ">=1.6.0" -threadpoolctl = ">=3.1.0" - -[package.extras] -benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] -build = 
["cython (>=3.0.10)", "meson-python (>=0.15.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] -examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] -install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] -maintenance = ["conda-lock (==2.5.6)"] -tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] - -[[package]] -name = "scipy" -version = "1.13.0" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, - {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, - {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, - {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, - {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, - {file = 
"scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, - {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, - {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, - {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, - {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, - {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, - {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, - {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, - {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, - {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, - {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, - {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, - {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, - {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, - {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, - {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, - {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, - {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, - {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, - {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, -] - -[package.dependencies] -numpy = ">=1.22.4,<2.3" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] -test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "send2trash" -version = "1.8.3" -description = "Send file to trash natively under Mac OS X, Windows and Linux" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, - {file = "Send2Trash-1.8.3.tar.gz", hash = 
"sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, -] - -[package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] - -[[package]] -name = "setuptools" -version = "70.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "shapely" -version = "2.0.4" -description = "Manipulation and analysis of geometric objects" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:011b77153906030b795791f2fdfa2d68f1a8d7e40bce78b029782ade3afe4f2f"}, - {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:9831816a5d34d5170aa9ed32a64982c3d6f4332e7ecfe62dc97767e163cb0b17"}, - {file = "shapely-2.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5c4849916f71dc44e19ed370421518c0d86cf73b26e8656192fcfcda08218fbd"}, - {file = "shapely-2.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841f93a0e31e4c64d62ea570d81c35de0f6cea224568b2430d832967536308e6"}, - {file = "shapely-2.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b4431f522b277c79c34b65da128029a9955e4481462cbf7ebec23aab61fc58"}, - {file = "shapely-2.0.4-cp310-cp310-win32.whl", hash = "sha256:92a41d936f7d6743f343be265ace93b7c57f5b231e21b9605716f5a47c2879e7"}, - {file = "shapely-2.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:30982f79f21bb0ff7d7d4a4e531e3fcaa39b778584c2ce81a147f95be1cd58c9"}, - {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de0205cb21ad5ddaef607cda9a3191eadd1e7a62a756ea3a356369675230ac35"}, - {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7d56ce3e2a6a556b59a288771cf9d091470116867e578bebced8bfc4147fbfd7"}, - {file = "shapely-2.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:58b0ecc505bbe49a99551eea3f2e8a9b3b24b3edd2a4de1ac0dc17bc75c9ec07"}, - {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:790a168a808bd00ee42786b8ba883307c0e3684ebb292e0e20009588c426da47"}, - {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4310b5494271e18580d61022c0857eb85d30510d88606fa3b8314790df7f367d"}, - {file = "shapely-2.0.4-cp311-cp311-win32.whl", hash = "sha256:63f3a80daf4f867bd80f5c97fbe03314348ac1b3b70fb1c0ad255a69e3749879"}, - {file = "shapely-2.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:c52ed79f683f721b69a10fb9e3d940a468203f5054927215586c5d49a072de8d"}, - {file = "shapely-2.0.4-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:5bbd974193e2cc274312da16b189b38f5f128410f3377721cadb76b1e8ca5328"}, - {file = "shapely-2.0.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:41388321a73ba1a84edd90d86ecc8bfed55e6a1e51882eafb019f45895ec0f65"}, - {file = "shapely-2.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0776c92d584f72f1e584d2e43cfc5542c2f3dd19d53f70df0900fda643f4bae6"}, - {file = "shapely-2.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c75c98380b1ede1cae9a252c6dc247e6279403fae38c77060a5e6186c95073ac"}, - {file = "shapely-2.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3e700abf4a37b7b8b90532fa6ed5c38a9bfc777098bc9fbae5ec8e618ac8f30"}, - {file = "shapely-2.0.4-cp312-cp312-win32.whl", hash = "sha256:4f2ab0faf8188b9f99e6a273b24b97662194160cc8ca17cf9d1fb6f18d7fb93f"}, - {file = "shapely-2.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:03152442d311a5e85ac73b39680dd64a9892fa42bb08fd83b3bab4fe6999bfa0"}, - {file = "shapely-2.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:994c244e004bc3cfbea96257b883c90a86e8cbd76e069718eb4c6b222a56f78b"}, - {file = "shapely-2.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05ffd6491e9e8958b742b0e2e7c346635033d0a5f1a0ea083547fcc854e5d5cf"}, - {file = "shapely-2.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbdc1140a7d08faa748256438291394967aa54b40009f54e8d9825e75ef6113"}, - {file = "shapely-2.0.4-cp37-cp37m-win32.whl", hash = "sha256:5af4cd0d8cf2912bd95f33586600cac9c4b7c5053a036422b97cfe4728d2eb53"}, - {file = "shapely-2.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:464157509ce4efa5ff285c646a38b49f8c5ef8d4b340f722685b09bb033c5ccf"}, - {file = "shapely-2.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:489c19152ec1f0e5c5e525356bcbf7e532f311bff630c9b6bc2db6f04da6a8b9"}, - {file = "shapely-2.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:b79bbd648664aa6f44ef018474ff958b6b296fed5c2d42db60078de3cffbc8aa"}, - {file = "shapely-2.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:674d7baf0015a6037d5758496d550fc1946f34bfc89c1bf247cabdc415d7747e"}, - {file = "shapely-2.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cd4ccecc5ea5abd06deeaab52fcdba372f649728050c6143cc405ee0c166679"}, - {file = "shapely-2.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5cdcbbe3080181498931b52a91a21a781a35dcb859da741c0345c6402bf00c"}, - {file = "shapely-2.0.4-cp38-cp38-win32.whl", hash = "sha256:55a38dcd1cee2f298d8c2ebc60fc7d39f3b4535684a1e9e2f39a80ae88b0cea7"}, - {file = "shapely-2.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:ec555c9d0db12d7fd777ba3f8b75044c73e576c720a851667432fabb7057da6c"}, - {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9103abd1678cb1b5f7e8e1af565a652e036844166c91ec031eeb25c5ca8af0"}, - {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:263bcf0c24d7a57c80991e64ab57cba7a3906e31d2e21b455f493d4aab534aaa"}, - {file = "shapely-2.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddf4a9bfaac643e62702ed662afc36f6abed2a88a21270e891038f9a19bc08fc"}, - {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:485246fcdb93336105c29a5cfbff8a226949db37b7473c89caa26c9bae52a242"}, - {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8de4578e838a9409b5b134a18ee820730e507b2d21700c14b71a2b0757396acc"}, - {file = "shapely-2.0.4-cp39-cp39-win32.whl", hash = "sha256:9dab4c98acfb5fb85f5a20548b5c0abe9b163ad3525ee28822ffecb5c40e724c"}, - {file = "shapely-2.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:31c19a668b5a1eadab82ff070b5a260478ac6ddad3a5b62295095174a8d26398"}, - {file = "shapely-2.0.4.tar.gz", hash = "sha256:5dc736127fac70009b8d309a0eeb74f3e08979e530cf7017f2f507ef62e6cfb8"}, -] - -[package.dependencies] 
-numpy = ">=1.14,<3" - -[package.extras] -docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "snuggs" -version = "1.4.7" -description = "Snuggs are s-expressions for Numpy" -optional = false -python-versions = "*" -files = [ - {file = "snuggs-1.4.7-py3-none-any.whl", hash = "sha256:988dde5d4db88e9d71c99457404773dabcc7a1c45971bfbe81900999942d9f07"}, - {file = "snuggs-1.4.7.tar.gz", hash = "sha256:501cf113fe3892e14e2fee76da5cd0606b7e149c411c271898e6259ebde2617b"}, -] - -[package.dependencies] -numpy = "*" -pyparsing = ">=2.1.6" - -[package.extras] -test = ["hypothesis", "pytest"] - -[[package]] -name = "soupsieve" -version = "2.5" -description = "A modern CSS selector implementation for Beautiful Soup." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, -] - -[[package]] -name = "sphobjinv" -version = "2.3.1" -description = "Sphinx objects.inv Inspection/Manipulation Tool" -optional = false -python-versions = ">=3.6" -files = [ - {file = "sphobjinv-2.3.1-py3-none-any.whl", hash = "sha256:f3efe68bb0ba6e32cb50df064fe6349b8f94681589b400dea753a2860dd576b5"}, - {file = "sphobjinv-2.3.1.tar.gz", hash = "sha256:1442a47fc93587a0177be95346904e388ef85a8366f90a1835a7c3eeeb122eb7"}, -] - -[package.dependencies] -attrs = ">=19.2" -certifi = "*" -jsonschema = ">=3.0" - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, -] - -[package.extras] -widechars = ["wcwidth"] - -[[package]] -name = "terminado" -version = "0.18.1" -description = "Tornado websocket backend for the 
Xterm.js Javascript terminal emulator library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, - {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, -] - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] -typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] - -[[package]] -name = "threadpoolctl" -version = "3.5.0" -description = "threadpoolctl" -optional = false -python-versions = ">=3.8" -files = [ - {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, - {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, -] - -[[package]] -name = "tinycss2" -version = "1.3.0" -description = "A tiny CSS parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, - {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, -] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["pytest", "ruff"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = 
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tornado" -version = "6.4.1" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -optional = false -python-versions = ">=3.8" -files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, -] - 
-[[package]] -name = "tqdm" -version = "4.66.4" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20240316" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, - {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, -] - -[[package]] -name = "typing-extensions" -version = "4.11.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = 
"typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" -optional = false -python-versions = ">=3.7" -files = [ - {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, - {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, -] - -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "watchdog" -version = "4.0.0" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.8" -files = [ - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, - {file = 
"watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, - {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, - {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, - {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = 
"sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, - {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, - {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, - {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, - {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "webcolors" -version = "1.13" -description = "A library for working with the color formats defined by HTML and CSS." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, - {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, -] - -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "widgetsnbextension" -version = "4.0.10" -description = "Jupyter interactive widgets for Jupyter Notebook" -optional = false -python-versions = ">=3.7" -files = [ - {file = "widgetsnbextension-4.0.10-py3-none-any.whl", hash = "sha256:d37c3724ec32d8c48400a435ecfa7d3e259995201fbefa37163124a9fcb393cc"}, - {file = "widgetsnbextension-4.0.10.tar.gz", hash = "sha256:64196c5ff3b9a9183a8e699a4227fb0b7002f252c814098e66c4d1cd0644688f"}, -] - 
-[[package]] -name = "zipp" -version = "3.19.1" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, - {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9" -content-hash = "bf7c6f5381b05736028478841d632497697d24e516a232bb7cc6570223c68449" diff --git a/pyproject.toml b/pyproject.toml index 78bea26..311b278 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,37 +1,47 @@ -[tool.poetry] -name = "pyspatialml" -version = "0.22.1" -description = "Machine learning classification and regression modelling for spatial raster data." 
-authors = ["Steven Pawley "] -license = "GPL-3.0-or-later" -readme = "README.md" -homepage = "https://stevenpawley.github.io/Pyspatialml/" -repository = "https://github.com/stevenpawley/Pyspatialml" - -[tool.poetry.dependencies] -python = "^3.9" -tqdm = "^4.66.4" -rasterio = "^1.3.10" -geopandas = "^0.14.4" -numpy = "^1.26.4" -scipy = "^1.13.0" -shapely = "^2.0.4" -pandas = "^2.2.2" -matplotlib = "^3.9.0" -scikit-learn = "^1.4.2" -affine = "^2.4.0" - -[tool.poetry.group.dev.dependencies] -quartodoc = "^0.7.2" -jupyter = "^1.0.0" -ipykernel = "^6.29.4" -black = "^24.4.2" - -[tool.poetry.group.test.dependencies] -pytest = "^8.2.0" -pytest-cov = "^5.0.0" -coverage = "^7.5.3" - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +# pyproject.toml +[tool.setuptools] +packages = ["pyspatialml"] + +[project] +name = "pyspatialml" +version = "0.22.1" +description = "Machine learning classification and regression modelling for spatial raster data." 
+authors = [ + {name = "Steven Pawley", email = "dr.stevenpawley@gmail.com"} +] +license = {file = "LICENSE"} +readme = "README.md" +requires-python = ">= 3.9" +dependencies = [ + "tqdm>=4.66.4", + "rasterio>=1.3.10", + "geopandas>=0.14.4", + "numpy>=1.26.4", + "scipy>=1.13.0", + "shapely>=2.0.4", + "pandas>=2.2.2", + "matplotlib>=3.9.0", + "scikit-learn>=1.4.2", + "affine>=2.4.0" +] + +[project.optional-dependencies] +documentation = [ + "quartodoc>=0.7.2", + "jupyter>=1.0.0", + "ipykernel>=6.29.4", + "black>=24.4.2" +] +testing = [ + "pytest>=8.2.0", + "pytest-cov>=5.0.0", + "coverage>=7.5.3" +] + +[project.urls] +homepage = "https://stevenpawley.github.io/Pyspatialml/" +repository = "https://github.com/stevenpawley/Pyspatialml" diff --git a/pyspatialml/__init__.py b/pyspatialml/__init__.py index c5ff929..2b9d722 100644 --- a/pyspatialml/__init__.py +++ b/pyspatialml/__init__.py @@ -1,2 +1,2 @@ -from .raster import Raster -from .rasterlayer import RasterLayer +from .raster import Raster +from .rasterlayer import RasterLayer diff --git a/pyspatialml/_plotting.py b/pyspatialml/_plotting.py index c6357de..f68cd94 100644 --- a/pyspatialml/_plotting.py +++ b/pyspatialml/_plotting.py @@ -1,388 +1,388 @@ -import math - -import rasterio -import matplotlib as mpl -import matplotlib.pyplot as plt -import numpy as np -from mpl_toolkits.axes_grid1 import make_axes_locatable -import matplotlib.ticker as mticker - - -def discrete_cmap(N, base_cmap=None): - """Create an N-bin discrete colormap from the specified input map. - - Source: - https://gist.github.com/jakevdp/91077b0cae40f8f8244a - - Parameters - ---------- - N : int - The number of colors in the colormap - - base_cmap : str - The name of the matplotlib cmap to convert into a discrete map. - - Returns - ------- - matplotlib.cmap - The cmap converted to a discrete map. 
- """ - - base = plt.cm.get_cmap(base_cmap) - color_list = base(np.linspace(0, 1, N)) - cmap_name = base.name + str(N) - - return base.from_list(cmap_name, color_list, N) - - -class RasterPlotMixin: - def plot( - self, - cmap=None, - norm=None, - figsize=None, - out_shape=(500, 500), - title_fontsize=8, - label_fontsize=6, - legend_fontsize=6, - names=None, - fig_kwds=None, - legend_kwds=None, - subplots_kwds=None, - ): - """Plot a Raster object as a raster matrix - - Parameters - ---------- - cmap : str (opt), default=None - Specify a single cmap to apply to all of the RasterLayers. - This overides the cmap attribute of each RasterLayer. - - norm : matplotlib.colors.Normalize (opt), default=None - A matplotlib.colors.Normalize to apply to all of the - RasterLayers. This overides the norm attribute of each - RasterLayer. - - figsize : tuple (opt), default=None - Size of the resulting matplotlib.figure.Figure. - - out_shape : tuple, default=(500, 500) - Number of rows, cols to read from the raster datasets for - plotting. - - title_fontsize : any number, default=8 - Size in pts of titles. - - label_fontsize : any number, default=6 - Size in pts of axis ticklabels. - - legend_fontsize : any number, default=6 - Size in pts of legend ticklabels. - - names : list (opt), default=None - Optionally supply a list of names for each RasterLayer to - override the default layer names for the titles. - - fig_kwds : dict (opt), default=None - Additional arguments to pass to the - matplotlib.pyplot.figure call when creating the figure - object. - - legend_kwds : dict (opt), default=None - Additional arguments to pass to the - matplotlib.pyplot.colorbar call when creating the colorbar - object. - - subplots_kwds : dict (opt), default=None - Additional arguments to pass to the - matplotlib.pyplot.subplots_adjust function. These are used to - control the spacing and position of each subplot, and can - include{left=None, bottom=None, right=None, top=None, - wspace=None, hspace=None}. 
- - Returns - ------- - axs : numpy.ndarray - array of matplotlib.axes._subplots.AxesSubplot or a single - matplotlib.axes._subplots.AxesSubplot if Raster object - contains only a single layer. - """ - - # some checks - if norm: - if not isinstance(norm, mpl.colors.Normalize): - raise AttributeError( - "norm argument should be a matplotlib.colors.Normalize object" - ) - - if cmap: - cmaps = [cmap for i in self.iloc] - else: - cmaps = [i.cmap for i in self.iloc] - - if norm: - norms = [norm for i in self.iloc] - else: - norms = [i.norm for i in self.iloc] - - if names is None: - names = self.names - else: - if len(names) != self.count: - raise AttributeError( - "arguments 'names' needs to be the same length as the number of " - "RasterLayer objects " - ) - - if fig_kwds is None: - fig_kwds = {} - - if legend_kwds is None: - legend_kwds = {} - - if subplots_kwds is None: - subplots_kwds = {} - - if figsize: - fig_kwds["figsize"] = figsize - - # estimate required number of rows and columns in figure - rows = int(np.sqrt(self.count)) - cols = int(math.ceil(np.sqrt(self.count))) - - if rows * cols < self.count: - rows += 1 - - fig, axs = plt.subplots(rows, cols, **fig_kwds) - - # axs.flat is an iterator over the row-order flattened axs array - if isinstance(axs, np.ndarray): - for ax, n, cmap, norm, name in zip( - axs.flat, range(self.count), cmaps, norms, names - ): - - arr = self.iloc[n].read(masked=True, out_shape=out_shape) - ax.set_title(name, fontsize=title_fontsize, y=1.00) - - im = ax.imshow( - arr, - extent=[ - self.bounds.left, - self.bounds.right, - self.bounds.bottom, - self.bounds.top, - ], - cmap=cmap, - norm=norm, - ) - - divider = make_axes_locatable(ax) - - if "orientation" not in legend_kwds.keys(): - legend_kwds["orientation"] = "vertical" - - if legend_kwds["orientation"] == "vertical": - legend_pos = "right" - - elif legend_kwds["orientation"] == "horizontal": - legend_pos = "bottom" - - cax = divider.append_axes(legend_pos, size="10%", pad=0.1) - 
cbar = plt.colorbar(im, cax=cax, **legend_kwds) - cbar.ax.tick_params(labelsize=legend_fontsize) - - # hide tick labels by default when multiple rows or cols - ax.axes.get_xaxis().set_ticklabels([]) - ax.axes.get_yaxis().set_ticklabels([]) - - # show y-axis tick labels on first subplot - if n == 0 and rows > 1: - ticks_loc = ax.get_yticks().tolist() - ax.yaxis.set_major_locator(mticker.FixedLocator(ticks_loc)) - ax.set_yticklabels( - ax.yaxis.get_majorticklocs().astype("int"), - fontsize=label_fontsize, - ) - - if n == 0 and rows == 1: - ticks_loc = ax.get_xticks().tolist() - ax.xaxis.set_major_locator(mticker.FixedLocator(ticks_loc)) - ax.set_xticklabels( - ax.xaxis.get_majorticklocs().astype("int"), - fontsize=label_fontsize, - ) - - ticks_loc = ax.get_yticks().tolist() - ax.yaxis.set_major_locator(mticker.FixedLocator(ticks_loc)) - ax.set_yticklabels( - ax.yaxis.get_majorticklocs().astype("int"), - fontsize=label_fontsize, - ) - - if rows > 1 and n == (rows * cols) - cols: - ticks_loc = ax.get_xticks().tolist() - ax.xaxis.set_major_locator(mticker.FixedLocator(ticks_loc)) - ax.set_xticklabels( - ax.xaxis.get_majorticklocs().astype("int"), - fontsize=label_fontsize, - ) - - for ax in axs.flat[axs.size - 1 : self.count - 1 : -1]: - ax.set_visible(False) - - plt.subplots_adjust(**subplots_kwds) - - else: - arr = self.iloc[0].read(masked=True, out_shape=out_shape) - cmap = cmaps[0] - norm = norms[0] - axs.set_title(list(names)[0], fontsize=title_fontsize, y=1.00) - im = axs.imshow( - arr, - extent=[ - self.bounds.left, - self.bounds.right, - self.bounds.bottom, - self.bounds.top, - ], - cmap=cmap, - norm=norm, - ) - - divider = make_axes_locatable(axs) - - if "orientation" not in legend_kwds.keys(): - legend_kwds["orientation"] = "vertical" - - if legend_kwds["orientation"] == "vertical": - legend_pos = "right" - - elif legend_kwds["orientation"] == "horizontal": - legend_pos = "bottom" - - cax = divider.append_axes(legend_pos, size="10%", pad=0.1) - cbar = 
plt.colorbar(im, cax=cax, **legend_kwds) - cbar.ax.tick_params(labelsize=legend_fontsize) - - return axs - - -class RasterLayerPlotMixin: - def plot( - self, - cmap=None, - norm=None, - ax=None, - cax=None, - figsize=None, - out_shape=(500, 500), - categorical=None, - legend=False, - vmin=None, - vmax=None, - fig_kwds=None, - legend_kwds=None, - ): - """Plot a RasterLayer using matplotlib.pyplot.imshow - - Parameters - ---------- - cmap : str (default None) - The name of a colormap recognized by matplotlib. - Overrides the cmap attribute of the RasterLayer. - - norm : matplotlib.colors.Normalize (opt) - A matplotlib.colors.Normalize to apply to the RasterLayer. - This overrides the norm attribute of the RasterLayer. - - ax : matplotlib.pyplot.Artist (optional, default None) - axes instance on which to draw to plot. - - cax : matplotlib.pyplot.Artist (optional, default None) - axes on which to draw the legend. - - figsize : tuple of integers (optional, default None) - Size of the matplotlib.figure.Figure. If the ax argument is - given explicitly, figsize is ignored. - - out_shape : tuple, default=(500, 500) - Number of rows, cols to read from the raster datasets for - plotting. - - categorical : bool (optional, default False) - if True then the raster values will be considered to - represent discrete values, otherwise they are considered to - represent continuous values. This overrides the - RasterLayer 'categorical' attribute. Setting the argument - categorical to True is ignored if the - RasterLayer.categorical is already True. - - legend : bool (optional, default False) - Whether to plot the legend. - - vmin, xmax : scale (optional, default None) - vmin and vmax define the data range that the colormap - covers. By default, the colormap covers the complete value - range of the supplied data. vmin, vmax are ignored if the - norm parameter is used. 
- - fig_kwds : dict (optional, default None) - Additional arguments to pass to the - matplotlib.pyplot.figure call when creating the figure - object. Ignored if ax is passed to the plot function. - - legend_kwds : dict (optional, default None) - Keyword arguments to pass to matplotlib.pyplot.colorbar(). - - Returns - ------- - ax : matplotlib axes instance - """ - - # some checks - if fig_kwds is None: - fig_kwds = {} - - if ax is None: - if cax is not None: - raise ValueError("'ax' can not be None if 'cax' is not.") - fig, ax = plt.subplots(figsize=figsize, **fig_kwds) - - ax.set_aspect("equal") - - if norm: - if not isinstance(norm, mpl.colors.Normalize): - raise AttributeError( - "norm argument should be a " "matplotlib.colors.Normalize object" - ) - - if cmap is None: - cmap = self.cmap - - if norm is None: - norm = self.norm - - if legend_kwds is None: - legend_kwds = {} - - arr = self.read(masked=True, out_shape=out_shape) - - if categorical is True: - if self.categorical is False: - N = np.bincount(arr) - cmap = discrete_cmap(N, base_cmap=cmap) - vmin, vmax = None, None - - im = ax.imshow( - X=arr, - extent=rasterio.plot.plotting_extent(self.ds), - cmap=cmap, - norm=norm, - vmin=vmin, - vmax=vmax, - ) - - if legend is True: - plt.colorbar(im, cax=cax, ax=ax, **legend_kwds) - - return ax +import math + +import rasterio +import matplotlib as mpl +import matplotlib.pyplot as plt +import numpy as np +from mpl_toolkits.axes_grid1 import make_axes_locatable +import matplotlib.ticker as mticker + + +def discrete_cmap(N, base_cmap=None): + """Create an N-bin discrete colormap from the specified input map. + + Source: + https://gist.github.com/jakevdp/91077b0cae40f8f8244a + + Parameters + ---------- + N : int + The number of colors in the colormap + + base_cmap : str + The name of the matplotlib cmap to convert into a discrete map. + + Returns + ------- + matplotlib.cmap + The cmap converted to a discrete map. 
+ """ + + base = plt.cm.get_cmap(base_cmap) + color_list = base(np.linspace(0, 1, N)) + cmap_name = base.name + str(N) + + return base.from_list(cmap_name, color_list, N) + + +class RasterPlotMixin: + def plot( + self, + cmap=None, + norm=None, + figsize=None, + out_shape=(500, 500), + title_fontsize=8, + label_fontsize=6, + legend_fontsize=6, + names=None, + fig_kwds=None, + legend_kwds=None, + subplots_kwds=None, + ): + """Plot a Raster object as a raster matrix + + Parameters + ---------- + cmap : str (opt), default=None + Specify a single cmap to apply to all of the RasterLayers. + This overides the cmap attribute of each RasterLayer. + + norm : matplotlib.colors.Normalize (opt), default=None + A matplotlib.colors.Normalize to apply to all of the + RasterLayers. This overides the norm attribute of each + RasterLayer. + + figsize : tuple (opt), default=None + Size of the resulting matplotlib.figure.Figure. + + out_shape : tuple, default=(500, 500) + Number of rows, cols to read from the raster datasets for + plotting. + + title_fontsize : any number, default=8 + Size in pts of titles. + + label_fontsize : any number, default=6 + Size in pts of axis ticklabels. + + legend_fontsize : any number, default=6 + Size in pts of legend ticklabels. + + names : list (opt), default=None + Optionally supply a list of names for each RasterLayer to + override the default layer names for the titles. + + fig_kwds : dict (opt), default=None + Additional arguments to pass to the + matplotlib.pyplot.figure call when creating the figure + object. + + legend_kwds : dict (opt), default=None + Additional arguments to pass to the + matplotlib.pyplot.colorbar call when creating the colorbar + object. + + subplots_kwds : dict (opt), default=None + Additional arguments to pass to the + matplotlib.pyplot.subplots_adjust function. These are used to + control the spacing and position of each subplot, and can + include{left=None, bottom=None, right=None, top=None, + wspace=None, hspace=None}. 
+ + Returns + ------- + axs : numpy.ndarray + array of matplotlib.axes._subplots.AxesSubplot or a single + matplotlib.axes._subplots.AxesSubplot if Raster object + contains only a single layer. + """ + + # some checks + if norm: + if not isinstance(norm, mpl.colors.Normalize): + raise AttributeError( + "norm argument should be a matplotlib.colors.Normalize object" + ) + + if cmap: + cmaps = [cmap for i in self.iloc] + else: + cmaps = [i.cmap for i in self.iloc] + + if norm: + norms = [norm for i in self.iloc] + else: + norms = [i.norm for i in self.iloc] + + if names is None: + names = self.names + else: + if len(names) != self.count: + raise AttributeError( + "arguments 'names' needs to be the same length as the number of " + "RasterLayer objects " + ) + + if fig_kwds is None: + fig_kwds = {} + + if legend_kwds is None: + legend_kwds = {} + + if subplots_kwds is None: + subplots_kwds = {} + + if figsize: + fig_kwds["figsize"] = figsize + + # estimate required number of rows and columns in figure + rows = int(np.sqrt(self.count)) + cols = int(math.ceil(np.sqrt(self.count))) + + if rows * cols < self.count: + rows += 1 + + fig, axs = plt.subplots(rows, cols, **fig_kwds) + + # axs.flat is an iterator over the row-order flattened axs array + if isinstance(axs, np.ndarray): + for ax, n, cmap, norm, name in zip( + axs.flat, range(self.count), cmaps, norms, names + ): + + arr = self.iloc[n].read(masked=True, out_shape=out_shape) + ax.set_title(name, fontsize=title_fontsize, y=1.00) + + im = ax.imshow( + arr, + extent=[ + self.bounds.left, + self.bounds.right, + self.bounds.bottom, + self.bounds.top, + ], + cmap=cmap, + norm=norm, + ) + + divider = make_axes_locatable(ax) + + if "orientation" not in legend_kwds.keys(): + legend_kwds["orientation"] = "vertical" + + if legend_kwds["orientation"] == "vertical": + legend_pos = "right" + + elif legend_kwds["orientation"] == "horizontal": + legend_pos = "bottom" + + cax = divider.append_axes(legend_pos, size="10%", pad=0.1) + 
cbar = plt.colorbar(im, cax=cax, **legend_kwds) + cbar.ax.tick_params(labelsize=legend_fontsize) + + # hide tick labels by default when multiple rows or cols + ax.axes.get_xaxis().set_ticklabels([]) + ax.axes.get_yaxis().set_ticklabels([]) + + # show y-axis tick labels on first subplot + if n == 0 and rows > 1: + ticks_loc = ax.get_yticks().tolist() + ax.yaxis.set_major_locator(mticker.FixedLocator(ticks_loc)) + ax.set_yticklabels( + ax.yaxis.get_majorticklocs().astype("int"), + fontsize=label_fontsize, + ) + + if n == 0 and rows == 1: + ticks_loc = ax.get_xticks().tolist() + ax.xaxis.set_major_locator(mticker.FixedLocator(ticks_loc)) + ax.set_xticklabels( + ax.xaxis.get_majorticklocs().astype("int"), + fontsize=label_fontsize, + ) + + ticks_loc = ax.get_yticks().tolist() + ax.yaxis.set_major_locator(mticker.FixedLocator(ticks_loc)) + ax.set_yticklabels( + ax.yaxis.get_majorticklocs().astype("int"), + fontsize=label_fontsize, + ) + + if rows > 1 and n == (rows * cols) - cols: + ticks_loc = ax.get_xticks().tolist() + ax.xaxis.set_major_locator(mticker.FixedLocator(ticks_loc)) + ax.set_xticklabels( + ax.xaxis.get_majorticklocs().astype("int"), + fontsize=label_fontsize, + ) + + for ax in axs.flat[axs.size - 1 : self.count - 1 : -1]: + ax.set_visible(False) + + plt.subplots_adjust(**subplots_kwds) + + else: + arr = self.iloc[0].read(masked=True, out_shape=out_shape) + cmap = cmaps[0] + norm = norms[0] + axs.set_title(list(names)[0], fontsize=title_fontsize, y=1.00) + im = axs.imshow( + arr, + extent=[ + self.bounds.left, + self.bounds.right, + self.bounds.bottom, + self.bounds.top, + ], + cmap=cmap, + norm=norm, + ) + + divider = make_axes_locatable(axs) + + if "orientation" not in legend_kwds.keys(): + legend_kwds["orientation"] = "vertical" + + if legend_kwds["orientation"] == "vertical": + legend_pos = "right" + + elif legend_kwds["orientation"] == "horizontal": + legend_pos = "bottom" + + cax = divider.append_axes(legend_pos, size="10%", pad=0.1) + cbar = 
plt.colorbar(im, cax=cax, **legend_kwds) + cbar.ax.tick_params(labelsize=legend_fontsize) + + return axs + + +class RasterLayerPlotMixin: + def plot( + self, + cmap=None, + norm=None, + ax=None, + cax=None, + figsize=None, + out_shape=(500, 500), + categorical=None, + legend=False, + vmin=None, + vmax=None, + fig_kwds=None, + legend_kwds=None, + ): + """Plot a RasterLayer using matplotlib.pyplot.imshow + + Parameters + ---------- + cmap : str (default None) + The name of a colormap recognized by matplotlib. + Overrides the cmap attribute of the RasterLayer. + + norm : matplotlib.colors.Normalize (opt) + A matplotlib.colors.Normalize to apply to the RasterLayer. + This overrides the norm attribute of the RasterLayer. + + ax : matplotlib.pyplot.Artist (optional, default None) + axes instance on which to draw to plot. + + cax : matplotlib.pyplot.Artist (optional, default None) + axes on which to draw the legend. + + figsize : tuple of integers (optional, default None) + Size of the matplotlib.figure.Figure. If the ax argument is + given explicitly, figsize is ignored. + + out_shape : tuple, default=(500, 500) + Number of rows, cols to read from the raster datasets for + plotting. + + categorical : bool (optional, default False) + if True then the raster values will be considered to + represent discrete values, otherwise they are considered to + represent continuous values. This overrides the + RasterLayer 'categorical' attribute. Setting the argument + categorical to True is ignored if the + RasterLayer.categorical is already True. + + legend : bool (optional, default False) + Whether to plot the legend. + + vmin, xmax : scale (optional, default None) + vmin and vmax define the data range that the colormap + covers. By default, the colormap covers the complete value + range of the supplied data. vmin, vmax are ignored if the + norm parameter is used. 
+ + fig_kwds : dict (optional, default None) + Additional arguments to pass to the + matplotlib.pyplot.figure call when creating the figure + object. Ignored if ax is passed to the plot function. + + legend_kwds : dict (optional, default None) + Keyword arguments to pass to matplotlib.pyplot.colorbar(). + + Returns + ------- + ax : matplotlib axes instance + """ + + # some checks + if fig_kwds is None: + fig_kwds = {} + + if ax is None: + if cax is not None: + raise ValueError("'ax' can not be None if 'cax' is not.") + fig, ax = plt.subplots(figsize=figsize, **fig_kwds) + + ax.set_aspect("equal") + + if norm: + if not isinstance(norm, mpl.colors.Normalize): + raise AttributeError( + "norm argument should be a " "matplotlib.colors.Normalize object" + ) + + if cmap is None: + cmap = self.cmap + + if norm is None: + norm = self.norm + + if legend_kwds is None: + legend_kwds = {} + + arr = self.read(masked=True, out_shape=out_shape) + + if categorical is True: + if self.categorical is False: + N = np.bincount(arr) + cmap = discrete_cmap(N, base_cmap=cmap) + vmin, vmax = None, None + + im = ax.imshow( + X=arr, + extent=rasterio.plot.plotting_extent(self.ds), + cmap=cmap, + norm=norm, + vmin=vmin, + vmax=vmax, + ) + + if legend is True: + plt.colorbar(im, cax=cax, ax=ax, **legend_kwds) + + return ax diff --git a/pyspatialml/_prediction.py b/pyspatialml/_prediction.py index 5733464..765ac66 100644 --- a/pyspatialml/_prediction.py +++ b/pyspatialml/_prediction.py @@ -1,250 +1,250 @@ -import numpy as np -import pandas as pd - - -def stack_constants(flat_pixels, constants, names=None): - """Column stack any constant values into the flat_pixels array. - - Used to add additional constant features to the Raster object. - - Parameters - ---------- - flat_pixels : ndarray - 2d numpy array representing the flattened raster data in - (sample_n, band_values) format. 
- - constants : list-like object, 1d array, or dict - Array of constant values to be added to the flat_pixels array - as additional features. - - If a dict is passed, the dict keys must refer to names of the - features in the flat_pixels array, and the values will replace - these features with constant values. - - names : list-like object (optional, default=None) - Names of the raster layers. - """ - if isinstance(constants, (int, float)): - constants = [constants] - - if isinstance(constants, list): - constants = np.asarray(constants) - constants = np.broadcast_to( - constants, (flat_pixels.shape[0], constants.shape[0]) - ) - flat_pixels = np.column_stack((flat_pixels, constants)) - - elif isinstance(constants, dict): - - keys_not_in_raster = [i for i in constants.keys() if i not in names] - - if len(keys_not_in_raster) > 0: - raise ValueError( - "The following keys are not in the raster: {x}".format( - x=keys_not_in_raster - ) - ) - - flat_pixels = pd.DataFrame(flat_pixels, columns=names) - - for key, value in constants.items(): - flat_pixels[key] = value - - flat_pixels = flat_pixels.values - - elif isinstance(constants, np.ndarray): - raise ValueError("constants must be a list or a numpy.ndarray") - - return flat_pixels - - -def predict_output(img, estimator, constants=None, names=None): - """Prediction function for classification or regression response. - - Parameters - ---- - img : tuple (window, numpy.ndarray) - A window object, and a 3d ndarray of raster data with the - dimensions in order of (band, rows, columns). - - estimator : estimator object implementing 'fit' - The object to use to fit the data. - - constants : list-like object, 1d array, or dict - Array of constant values to be added to the flat_pixels array - as additional features. - - If a dict is passed, the dict keys must refer to names of the - features in the flat_pixels array, and the values will replace - these features with constant values. 
- - names : list-like object (optional, default=None) - Names of the raster layers. - - Returns - ------- - numpy.ndarray - 2d numpy array representing a single band raster containing the - classification or regression result. - """ - window, img = img - img = np.ma.masked_invalid(img) - - # reorder into rows, cols, bands(transpose) - n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] - - # reshape into 2D array (rows=sample_n, cols=band_values) - n_samples = rows * cols - flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) - - # create mask for NaN values - flat_pixels_mask = flat_pixels.mask.copy() - - # fill nans for prediction - flat_pixels = flat_pixels.filled(0) - - # add constants - if constants is not None: - flat_pixels = stack_constants(flat_pixels, constants, names) - - # predict and replace mask - result = estimator.predict(flat_pixels) - result = np.ma.masked_array(data=result, mask=flat_pixels_mask.any(axis=1)) - - # reshape the prediction from a 1D into 3D array [band, row, col] - result = result.reshape((1, window.height, window.width)) - - return result - - -def predict_prob(img, estimator, constants=None, names=None): - """Class probabilities function. - - Parameters - ---------- - img : tuple (window, numpy.ndarray) - A window object, and a 3d ndarray of raster data with the - dimensions in order of (band, rows, columns). - - estimator : estimator object implementing 'fit' - The object to use to fit the data. - - constants : list-like object, 1d array, or dict - Array of constant values to be added to the flat_pixels array - as additional features. - - If a dict is passed, the dict keys must refer to names of the - features in the flat_pixels array, and the values will replace - these features with constant values. - - names : list-like object (optional, default=None) - Names of the raster layers. 
- - Returns - ------- - numpy.ndarray - Multi band raster as a 3d numpy array containing the - probabilities associated with each class. ndarray dimensions - are in the order of (class, row, column). - """ - window, img = img - - # reorder into rows, cols, bands (transpose) - n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] - img = np.ma.masked_invalid(img) - mask2d = img.mask.any(axis=0) - - # then resample into 2D array (rows=sample_n, cols=band_values) - n_samples = rows * cols - flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) - - # fill mask with zeros for prediction - flat_pixels = flat_pixels.filled(0) - - # add constants - if constants is not None: - flat_pixels = stack_constants(flat_pixels, constants, names) - - # predict probabilities - result_proba = estimator.predict_proba(flat_pixels) - - # reshape class probabilities back to 3D array [class, rows, cols] - result_proba = result_proba.reshape( - (window.height, window.width, result_proba.shape[1]) - ) - - # reshape band into rasterio format [band, row, col] - result_proba = result_proba.transpose(2, 0, 1) - - # repeat mask for n_bands - mask3d = np.repeat( - a=mask2d[np.newaxis, :, :], repeats=result_proba.shape[0], axis=0 - ) - - # convert proba to masked array - result_proba = np.ma.masked_array(result_proba, mask=mask3d, fill_value=np.nan) - - return result_proba - - -def predict_multioutput(img, estimator, constants=None, names=None): - """Multi-target prediction function. - - Parameters - ---------- - img : tuple (window, numpy.ndarray) - A window object, and a 3d ndarray of raster data with the - dimensions in order of (band, rows, columns). - - estimator : estimator object implementing 'fit' - The object to use to fit the data. - - constants : list-like object, 1d array, or dict - Array of constant values to be added to the flat_pixels array - as additional features. 
- - If a dict is passed, the dict keys must refer to names of the - features in the flat_pixels array, and the values will replace - these features with constant values. - - names : list-like object (optional, default=None) - Names of the raster layers. - - Returns - ------- - numpy.ndarray - 3d numpy array representing the multi-target prediction result - with the dimensions in the order of (target, row, column). - """ - window, img = img - - # reorder into rows, cols, bands(transpose) - n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] - img = np.ma.masked_invalid(img) - mask2d = img.mask.any(axis=0) - - # reshape into 2D array (rows=sample_n, cols=band_values) - n_samples = rows * cols - flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) - flat_pixels = flat_pixels.filled(0) - - # add constants - if constants is not None: - flat_pixels = stack_constants(flat_pixels, constants, names) - - # predict probabilities - result = estimator.predict(flat_pixels) - - # reshape class probabilities back to 3D array [class, rows, cols] - result = result.reshape((window.height, window.width, result.shape[1])) - - # reshape band into rasterio format [band, row, col] - result = result.transpose(2, 0, 1) - - # repeat mask for n_bands - mask3d = np.repeat(a=mask2d[np.newaxis, :, :], repeats=result.shape[0], axis=0) - - # convert proba to masked array - result = np.ma.masked_array(result, mask=mask3d, fill_value=np.nan) - - return result +import numpy as np +import pandas as pd + + +def stack_constants(flat_pixels, constants, names=None): + """Column stack any constant values into the flat_pixels array. + + Used to add additional constant features to the Raster object. + + Parameters + ---------- + flat_pixels : ndarray + 2d numpy array representing the flattened raster data in + (sample_n, band_values) format. 
+ + constants : list-like object, 1d array, or dict + Array of constant values to be added to the flat_pixels array + as additional features. + + If a dict is passed, the dict keys must refer to names of the + features in the flat_pixels array, and the values will replace + these features with constant values. + + names : list-like object (optional, default=None) + Names of the raster layers. + """ + if isinstance(constants, (int, float)): + constants = [constants] + + if isinstance(constants, list): + constants = np.asarray(constants) + constants = np.broadcast_to( + constants, (flat_pixels.shape[0], constants.shape[0]) + ) + flat_pixels = np.column_stack((flat_pixels, constants)) + + elif isinstance(constants, dict): + + keys_not_in_raster = [i for i in constants.keys() if i not in names] + + if len(keys_not_in_raster) > 0: + raise ValueError( + "The following keys are not in the raster: {x}".format( + x=keys_not_in_raster + ) + ) + + flat_pixels = pd.DataFrame(flat_pixels, columns=names) + + for key, value in constants.items(): + flat_pixels[key] = value + + flat_pixels = flat_pixels.values + + elif isinstance(constants, np.ndarray): + raise ValueError("constants must be a list or a numpy.ndarray") + + return flat_pixels + + +def predict_output(img, estimator, constants=None, names=None): + """Prediction function for classification or regression response. + + Parameters + ---- + img : tuple (window, numpy.ndarray) + A window object, and a 3d ndarray of raster data with the + dimensions in order of (band, rows, columns). + + estimator : estimator object implementing 'fit' + The object to use to fit the data. + + constants : list-like object, 1d array, or dict + Array of constant values to be added to the flat_pixels array + as additional features. + + If a dict is passed, the dict keys must refer to names of the + features in the flat_pixels array, and the values will replace + these features with constant values. 
+ + names : list-like object (optional, default=None) + Names of the raster layers. + + Returns + ------- + numpy.ndarray + 2d numpy array representing a single band raster containing the + classification or regression result. + """ + window, img = img + img = np.ma.masked_invalid(img) + + # reorder into rows, cols, bands(transpose) + n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] + + # reshape into 2D array (rows=sample_n, cols=band_values) + n_samples = rows * cols + flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) + + # create mask for NaN values + flat_pixels_mask = flat_pixels.mask.copy() + + # fill nans for prediction + flat_pixels = flat_pixels.filled(0) + + # add constants + if constants is not None: + flat_pixels = stack_constants(flat_pixels, constants, names) + + # predict and replace mask + result = estimator.predict(flat_pixels) + result = np.ma.masked_array(data=result, mask=flat_pixels_mask.any(axis=1)) + + # reshape the prediction from a 1D into 3D array [band, row, col] + result = result.reshape((1, window.height, window.width)) + + return result + + +def predict_prob(img, estimator, constants=None, names=None): + """Class probabilities function. + + Parameters + ---------- + img : tuple (window, numpy.ndarray) + A window object, and a 3d ndarray of raster data with the + dimensions in order of (band, rows, columns). + + estimator : estimator object implementing 'fit' + The object to use to fit the data. + + constants : list-like object, 1d array, or dict + Array of constant values to be added to the flat_pixels array + as additional features. + + If a dict is passed, the dict keys must refer to names of the + features in the flat_pixels array, and the values will replace + these features with constant values. + + names : list-like object (optional, default=None) + Names of the raster layers. 
+ + Returns + ------- + numpy.ndarray + Multi band raster as a 3d numpy array containing the + probabilities associated with each class. ndarray dimensions + are in the order of (class, row, column). + """ + window, img = img + + # reorder into rows, cols, bands (transpose) + n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] + img = np.ma.masked_invalid(img) + mask2d = img.mask.any(axis=0) + + # then resample into 2D array (rows=sample_n, cols=band_values) + n_samples = rows * cols + flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) + + # fill mask with zeros for prediction + flat_pixels = flat_pixels.filled(0) + + # add constants + if constants is not None: + flat_pixels = stack_constants(flat_pixels, constants, names) + + # predict probabilities + result_proba = estimator.predict_proba(flat_pixels) + + # reshape class probabilities back to 3D array [class, rows, cols] + result_proba = result_proba.reshape( + (window.height, window.width, result_proba.shape[1]) + ) + + # reshape band into rasterio format [band, row, col] + result_proba = result_proba.transpose(2, 0, 1) + + # repeat mask for n_bands + mask3d = np.repeat( + a=mask2d[np.newaxis, :, :], repeats=result_proba.shape[0], axis=0 + ) + + # convert proba to masked array + result_proba = np.ma.masked_array(result_proba, mask=mask3d, fill_value=np.nan) + + return result_proba + + +def predict_multioutput(img, estimator, constants=None, names=None): + """Multi-target prediction function. + + Parameters + ---------- + img : tuple (window, numpy.ndarray) + A window object, and a 3d ndarray of raster data with the + dimensions in order of (band, rows, columns). + + estimator : estimator object implementing 'fit' + The object to use to fit the data. + + constants : list-like object, 1d array, or dict + Array of constant values to be added to the flat_pixels array + as additional features. 
+ + If a dict is passed, the dict keys must refer to names of the + features in the flat_pixels array, and the values will replace + these features with constant values. + + names : list-like object (optional, default=None) + Names of the raster layers. + + Returns + ------- + numpy.ndarray + 3d numpy array representing the multi-target prediction result + with the dimensions in the order of (target, row, column). + """ + window, img = img + + # reorder into rows, cols, bands(transpose) + n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] + img = np.ma.masked_invalid(img) + mask2d = img.mask.any(axis=0) + + # reshape into 2D array (rows=sample_n, cols=band_values) + n_samples = rows * cols + flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) + flat_pixels = flat_pixels.filled(0) + + # add constants + if constants is not None: + flat_pixels = stack_constants(flat_pixels, constants, names) + + # predict probabilities + result = estimator.predict(flat_pixels) + + # reshape class probabilities back to 3D array [class, rows, cols] + result = result.reshape((window.height, window.width, result.shape[1])) + + # reshape band into rasterio format [band, row, col] + result = result.transpose(2, 0, 1) + + # repeat mask for n_bands + mask3d = np.repeat(a=mask2d[np.newaxis, :, :], repeats=result.shape[0], axis=0) + + # convert proba to masked array + result = np.ma.masked_array(result, mask=mask3d, fill_value=np.nan) + + return result diff --git a/pyspatialml/_rasterstats.py b/pyspatialml/_rasterstats.py index 9cba3f1..8a4d4ed 100644 --- a/pyspatialml/_rasterstats.py +++ b/pyspatialml/_rasterstats.py @@ -1,137 +1,137 @@ -import numpy as np - - -class RasterStatsMixin: - def _stats(self, max_pixels): - rel_width = self.shape[1] / max_pixels - - if rel_width > 1: - col_scaling = round(max_pixels / rel_width) - row_scaling = max_pixels - col_scaling - else: - col_scaling = round(max_pixels * rel_width) - row_scaling = max_pixels - col_scaling - - 
out_shape = (row_scaling, col_scaling) - arr = self.read(masked=True, out_shape=out_shape) - return arr.reshape((arr.shape[0], arr.shape[1] * arr.shape[2])) - - def min(self, max_pixels=10000): - arr = self._stats(max_pixels) - return np.nanmin(arr, axis=1).data - - def max(self, max_pixels=10000): - arr = self._stats(max_pixels) - return np.nanmax(arr, axis=1).data - - def mean(self, max_pixels=10000): - arr = self._stats(max_pixels) - return np.nanmean(arr, axis=1).data - - def median(self, max_pixels=10000): - arr = self._stats(max_pixels) - return np.nanmedian(arr, axis=1).data - - def stddev(self, max_pixels=10000): - arr = self._stats(max_pixels) - return np.nanstd(arr, axis=1).data - - -class RasterLayerStatsMixin: - def _stats(self, max_pixels): - """Take a sample of pixels from which to derive per-band - statistics.""" - - rel_width = self.shape[1] / max_pixels - - if rel_width > 1: - col_scaling = round(max_pixels / rel_width) - row_scaling = max_pixels - col_scaling - else: - col_scaling = round(max_pixels * rel_width) - row_scaling = max_pixels - col_scaling - - out_shape = (row_scaling, col_scaling) - arr = self.read(masked=True, out_shape=out_shape) - arr = arr.flatten() - return arr - - def min(self, max_pixels=10000): - """Minimum value. - - Parameters - ---------- - max_pixels : int - Number of pixels used to inform statistical estimate. - - Returns - ------- - numpy.float32 - The minimum value of the object - """ - arr = self._stats(max_pixels) - return np.nanmin(arr) - - def max(self, max_pixels=10000): - """Maximum value. - - Parameters - ---------- - max_pixels : int - Number of pixels used to inform statistical estimate. - - Returns - ------- - numpy.float32 - The maximum value of the object's pixels. - """ - arr = self._stats(max_pixels) - return np.nanmax(arr) - - def mean(self, max_pixels=10000): - """Mean value - - Parameters - ---------- - max_pixels : int - Number of pixels used to inform statistical estimate. 
- - Returns - ------- - numpy.float32 - The mean value of the object's pixels. - """ - arr = self._stats(max_pixels) - return np.nanmean(arr) - - def median(self, max_pixels=10000): - """Median value - - Parameters - ---------- - max_pixels : int - Number of pixels used to inform statistical estimate. - - Returns - ------- - numpy.float32 - The medium value of the object's pixels. - """ - arr = self._stats(max_pixels) - return np.nanmedian(arr) - - def stddev(self, max_pixels=10000): - """Standard deviation - - Parameters - ---------- - max_pixels : int - Number of pixels used to inform statistical estimate. - - Returns - ------- - numpy.float32 - The standard deviation of the object's pixels. - """ - arr = self._stats(max_pixels) - return np.nanstd(arr) +import numpy as np + + +class RasterStatsMixin: + def _stats(self, max_pixels): + rel_width = self.shape[1] / max_pixels + + if rel_width > 1: + col_scaling = round(max_pixels / rel_width) + row_scaling = max_pixels - col_scaling + else: + col_scaling = round(max_pixels * rel_width) + row_scaling = max_pixels - col_scaling + + out_shape = (row_scaling, col_scaling) + arr = self.read(masked=True, out_shape=out_shape) + return arr.reshape((arr.shape[0], arr.shape[1] * arr.shape[2])) + + def min(self, max_pixels=10000): + arr = self._stats(max_pixels) + return np.nanmin(arr, axis=1).data + + def max(self, max_pixels=10000): + arr = self._stats(max_pixels) + return np.nanmax(arr, axis=1).data + + def mean(self, max_pixels=10000): + arr = self._stats(max_pixels) + return np.nanmean(arr, axis=1).data + + def median(self, max_pixels=10000): + arr = self._stats(max_pixels) + return np.nanmedian(arr, axis=1).data + + def stddev(self, max_pixels=10000): + arr = self._stats(max_pixels) + return np.nanstd(arr, axis=1).data + + +class RasterLayerStatsMixin: + def _stats(self, max_pixels): + """Take a sample of pixels from which to derive per-band + statistics.""" + + rel_width = self.shape[1] / max_pixels + + if rel_width > 1: 
+ col_scaling = round(max_pixels / rel_width) + row_scaling = max_pixels - col_scaling + else: + col_scaling = round(max_pixels * rel_width) + row_scaling = max_pixels - col_scaling + + out_shape = (row_scaling, col_scaling) + arr = self.read(masked=True, out_shape=out_shape) + arr = arr.flatten() + return arr + + def min(self, max_pixels=10000): + """Minimum value. + + Parameters + ---------- + max_pixels : int + Number of pixels used to inform statistical estimate. + + Returns + ------- + numpy.float32 + The minimum value of the object + """ + arr = self._stats(max_pixels) + return np.nanmin(arr) + + def max(self, max_pixels=10000): + """Maximum value. + + Parameters + ---------- + max_pixels : int + Number of pixels used to inform statistical estimate. + + Returns + ------- + numpy.float32 + The maximum value of the object's pixels. + """ + arr = self._stats(max_pixels) + return np.nanmax(arr) + + def mean(self, max_pixels=10000): + """Mean value + + Parameters + ---------- + max_pixels : int + Number of pixels used to inform statistical estimate. + + Returns + ------- + numpy.float32 + The mean value of the object's pixels. + """ + arr = self._stats(max_pixels) + return np.nanmean(arr) + + def median(self, max_pixels=10000): + """Median value + + Parameters + ---------- + max_pixels : int + Number of pixels used to inform statistical estimate. + + Returns + ------- + numpy.float32 + The medium value of the object's pixels. + """ + arr = self._stats(max_pixels) + return np.nanmedian(arr) + + def stddev(self, max_pixels=10000): + """Standard deviation + + Parameters + ---------- + max_pixels : int + Number of pixels used to inform statistical estimate. + + Returns + ------- + numpy.float32 + The standard deviation of the object's pixels. 
+ """ + arr = self._stats(max_pixels) + return np.nanstd(arr) diff --git a/pyspatialml/_utils.py b/pyspatialml/_utils.py index ea607f6..4335f50 100644 --- a/pyspatialml/_utils.py +++ b/pyspatialml/_utils.py @@ -1,44 +1,44 @@ -import multiprocessing -import numpy as np - - -def get_nodata_value(dtype): - """Get a nodata value based on the minimum value permissible by dtype - - Parameters - ---------- - dtype : str or dtype - dtype to return a nodata value for - - Returns - ------- - nodata : any number - A nodata value that is accomodated by the supplied dtype - """ - try: - nodata = np.iinfo(dtype).min - except ValueError: - nodata = np.finfo(dtype).min - - return nodata - - -def get_num_workers(n_jobs): - """Determine cpu count using scikit-learn convention of -1, -2 ... - - Parameters - ---------- - n_jobs : int - Number of processing cores including -1 for all cores -1, etc. - - Returns - ------- - n_jobs : int - The actual number of processing cores. - """ - n_cpus = multiprocessing.cpu_count() - - if n_jobs < 0: - n_jobs = n_cpus + n_jobs + 1 - - return n_jobs +import multiprocessing +import numpy as np + + +def get_nodata_value(dtype): + """Get a nodata value based on the minimum value permissible by dtype + + Parameters + ---------- + dtype : str or dtype + dtype to return a nodata value for + + Returns + ------- + nodata : any number + A nodata value that is accomodated by the supplied dtype + """ + try: + nodata = np.iinfo(dtype).min + except ValueError: + nodata = np.finfo(dtype).min + + return nodata + + +def get_num_workers(n_jobs): + """Determine cpu count using scikit-learn convention of -1, -2 ... + + Parameters + ---------- + n_jobs : int + Number of processing cores including -1 for all cores -1, etc. + + Returns + ------- + n_jobs : int + The actual number of processing cores. 
+ """ + n_cpus = multiprocessing.cpu_count() + + if n_jobs < 0: + n_jobs = n_cpus + n_jobs + 1 + + return n_jobs diff --git a/pyspatialml/datasets/extracted_pixels.txt b/pyspatialml/datasets/extracted_pixels.txt index e5205a3..8cb718c 100644 --- a/pyspatialml/datasets/extracted_pixels.txt +++ b/pyspatialml/datasets/extracted_pixels.txt @@ -1,2437 +1,2437 @@ -id b1 b2 b3 b4 b5 b7 -133 59 5 94 76 80 58 89 70 -134 59 5 91 76 77 56 81 61 -135 59 5 84 70 76 62 102 65 -136 59 5 79 65 75 64 123 78 -137 59 5 77 64 70 62 128 81 -138 59 5 77 61 67 61 119 75 -139 59 5 79 61 74 64 137 86 -140 59 5 77 61 70 61 126 79 -141 59 5 78 62 68 61 121 77 -142 59 5 77 60 65 61 118 77 -143 59 5 75 60 64 60 114 72 -144 59 5 75 59 60 59 104 68 -145 59 5 81 63 72 72 110 68 -133 60 5 79 65 64 63 89 51 -134 60 5 78 66 66 61 100 56 -135 60 5 80 63 66 60 96 60 -136 60 5 76 59 62 60 90 56 -137 60 5 73 59 60 58 93 56 -138 60 5 73 56 57 58 86 52 -139 60 5 75 60 64 59 113 72 -140 60 5 78 60 63 58 113 72 -141 60 5 77 58 61 59 108 68 -142 60 5 73 57 57 56 100 63 -143 60 5 75 58 57 55 97 61 -144 60 5 73 59 57 58 98 61 -145 60 5 75 60 63 59 104 62 -133 61 5 73 59 60 65 94 58 -134 61 5 74 55 58 57 95 59 -135 61 5 72 55 54 53 84 52 -136 61 5 75 55 55 58 85 52 -137 61 5 75 55 55 58 85 52 -138 61 5 72 52 51 64 75 43 -139 61 5 72 56 58 57 85 53 -140 61 5 74 59 61 58 101 62 -141 61 5 76 57 61 59 100 59 -142 61 5 73 55 58 54 85 52 -143 61 5 71 55 52 56 81 48 -144 61 5 75 57 55 56 90 55 -145 61 5 73 57 56 54 91 55 -133 62 5 75 58 54 59 89 52 -134 62 5 70 53 52 58 83 50 -135 62 5 69 54 51 57 77 46 -136 62 5 74 53 49 59 81 47 -137 62 5 71 52 44 69 67 39 -138 62 5 68 55 45 69 65 36 -139 62 5 74 56 57 56 88 52 -140 62 5 71 55 58 59 95 56 -141 62 5 72 54 54 55 85 57 -142 62 5 71 55 51 56 79 47 -143 62 5 72 54 51 55 75 48 -144 62 5 69 51 48 53 60 35 -145 62 5 69 51 46 54 63 36 -133 63 5 74 58 55 61 95 57 -134 63 5 70 56 52 68 78 47 -135 63 5 69 54 47 73 76 43 -136 63 5 72 55 46 71 73 40 -137 63 5 72 56 48 72 67 36 
-138 63 5 68 55 47 72 71 38 -139 63 5 71 54 56 58 83 49 -140 63 5 71 54 56 58 90 54 -141 63 5 70 55 52 56 87 54 -142 63 5 71 52 48 53 71 42 -143 63 5 68 51 46 58 67 38 -144 63 5 70 56 50 64 73 40 -145 63 5 68 49 42 59 58 32 -134 64 5 74 62 64 73 109 66 -135 64 5 73 62 57 84 103 59 -136 64 5 73 60 52 84 102 54 -137 64 5 72 59 52 75 90 48 -138 64 5 68 55 47 72 71 38 -139 64 5 70 54 51 63 84 50 -140 64 5 73 56 57 58 93 59 -141 64 5 70 56 54 62 85 48 -142 64 5 72 54 52 65 72 41 -143 64 5 70 55 47 72 79 44 -144 64 5 71 61 57 75 93 53 -145 64 5 71 51 48 60 64 36 -134 65 5 80 66 74 74 111 69 -135 65 5 79 69 69 80 110 64 -136 65 5 74 63 61 80 112 62 -137 65 5 73 58 55 73 100 57 -138 65 5 70 57 52 71 79 43 -139 65 5 74 58 62 65 91 54 -140 65 5 75 58 61 62 102 62 -141 65 5 73 56 60 65 98 58 -142 65 5 73 57 55 67 93 55 -143 65 5 72 57 51 74 98 55 -144 65 5 71 57 55 68 90 53 -145 65 5 72 54 52 60 74 41 -138 80 7 87 75 80 71 107 73 -139 80 7 96 84 95 77 128 88 -140 80 7 94 86 99 78 138 89 -141 80 7 102 91 107 75 127 86 -138 81 7 98 88 96 79 139 98 -139 81 7 110 97 115 81 152 122 -140 81 7 125 115 135 76 139 125 -141 81 7 100 91 101 75 109 83 -138 82 7 94 80 89 74 136 99 -139 82 7 104 89 98 71 142 116 -140 82 7 135 129 148 78 179 176 -141 82 7 118 111 128 71 143 137 -138 83 7 86 74 74 81 119 66 -139 83 7 100 88 97 76 125 96 -140 83 7 131 125 147 78 183 178 -141 83 7 132 122 141 74 172 167 -138 84 7 112 100 110 88 132 101 -139 84 7 117 110 122 82 127 105 -140 84 7 130 123 144 79 182 177 -141 84 7 124 112 133 68 173 174 -146 84 7 95 85 95 62 116 95 -147 84 7 116 108 129 69 137 133 -148 84 7 126 118 144 73 143 132 -149 84 7 109 96 116 64 128 116 -150 84 7 121 108 124 74 107 83 -145 85 7 93 76 85 62 105 77 -146 85 7 113 97 114 62 121 112 -147 85 7 122 111 134 72 139 133 -148 85 7 122 116 134 72 140 131 -149 85 7 115 102 121 64 125 112 -150 85 7 133 122 141 73 109 89 -145 86 7 116 104 118 68 146 131 -146 86 7 125 111 127 67 144 140 -147 86 7 122 112 131 69 138 132 -148 86 7 117 106 
126 65 115 108 -149 86 7 119 109 121 65 124 111 -150 86 7 114 99 109 61 108 91 -403 114 1 97 83 90 58 85 74 -404 114 1 99 81 88 51 77 65 -402 115 1 98 88 93 57 100 90 -403 115 1 99 86 91 57 90 74 -404 115 1 93 76 75 57 84 69 -405 115 1 83 67 68 55 85 66 -402 116 1 129 117 129 69 103 91 -403 116 1 97 85 88 64 93 73 -404 116 1 106 90 98 64 99 76 -405 116 1 105 94 100 64 103 88 -406 116 1 112 97 109 65 101 92 -407 116 1 104 94 109 60 92 80 -408 116 1 112 102 119 62 101 88 -401 117 1 124 105 116 54 120 117 -402 117 1 157 139 156 80 165 156 -403 117 1 94 87 96 78 115 82 -404 117 1 103 87 97 63 101 83 -405 117 1 118 105 117 69 106 88 -406 117 1 108 92 101 63 100 80 -407 117 1 110 102 120 68 127 102 -408 117 1 108 102 120 63 108 90 -409 117 1 110 98 111 58 116 103 -410 117 1 106 93 98 51 69 61 -400 118 1 92 77 76 57 72 54 -401 118 1 99 81 82 52 86 81 -402 118 1 117 104 116 61 128 128 -403 118 1 102 94 110 73 123 100 -404 118 1 83 70 73 60 92 71 -405 118 1 103 89 106 65 109 88 -406 118 1 84 66 68 49 73 64 -407 118 1 99 87 96 56 89 80 -408 118 1 111 99 110 57 93 87 -409 118 1 111 96 107 53 86 77 -410 118 1 104 89 90 46 88 83 -411 118 1 108 99 114 57 102 91 -412 118 1 99 86 93 56 83 76 -399 119 1 72 60 50 62 68 45 -400 119 1 81 68 60 67 80 59 -401 119 1 102 94 105 71 114 92 -402 119 1 102 94 105 71 114 92 -403 119 1 97 88 101 65 115 94 -404 119 1 97 83 89 60 90 61 -405 119 1 113 103 118 69 105 89 -406 119 1 94 78 85 55 87 69 -407 119 1 121 110 132 69 141 125 -408 119 1 149 136 154 77 150 149 -409 119 1 134 117 128 61 121 120 -410 119 1 96 76 77 38 58 55 -411 119 1 95 82 92 51 99 80 -412 119 1 109 96 113 70 99 86 -413 119 1 104 87 98 61 89 74 -399 120 1 83 68 71 66 63 39 -400 120 1 86 71 69 76 83 50 -401 120 1 96 87 95 75 109 76 -402 120 1 97 86 93 62 102 79 -403 120 1 98 85 95 66 98 79 -404 120 1 113 101 113 70 112 99 -405 120 1 98 85 96 59 93 81 -406 120 1 141 126 145 74 143 135 -407 120 1 159 147 168 81 154 147 -408 120 1 135 124 139 68 126 121 -409 120 1 107 87 95 55 90 
82 -410 120 1 92 74 84 51 81 69 -411 120 1 110 99 117 66 107 95 -412 120 1 106 100 122 69 120 103 -413 120 1 100 87 97 55 95 81 -398 121 1 94 78 79 64 76 60 -399 121 1 95 83 84 65 80 64 -400 121 1 99 86 91 69 94 61 -401 121 1 103 92 102 68 106 81 -402 121 1 110 102 117 64 110 98 -403 121 1 114 105 117 73 124 100 -404 121 1 104 95 105 66 104 91 -405 121 1 102 87 92 55 95 87 -406 121 1 114 99 111 58 111 111 -407 121 1 128 112 128 61 107 104 -408 121 1 124 112 126 61 91 89 -409 121 1 104 90 94 59 88 81 -410 121 1 103 87 99 58 98 84 -411 121 1 110 97 112 62 105 94 -412 121 1 97 85 95 53 85 75 -397 122 1 102 90 93 75 95 68 -398 122 1 106 91 102 74 94 69 -399 122 1 111 98 111 68 97 80 -400 122 1 125 116 125 71 100 78 -401 122 1 107 95 104 64 94 76 -402 122 1 114 109 126 67 123 110 -403 122 1 121 115 133 75 130 108 -404 122 1 109 100 111 70 116 100 -405 122 1 98 86 92 59 86 75 -406 122 1 94 77 82 44 79 73 -407 122 1 96 79 84 43 88 80 -408 122 1 110 96 109 56 109 102 -409 122 1 109 96 109 58 99 92 -410 122 1 96 78 81 54 83 72 -411 122 1 104 88 97 56 91 78 -397 123 1 128 118 133 76 115 95 -398 123 1 117 102 118 70 103 82 -399 123 1 116 103 113 69 100 82 -400 123 1 128 118 131 71 97 86 -401 123 1 108 98 114 61 97 84 -402 123 1 118 112 131 67 110 96 -403 123 1 115 109 122 67 122 101 -404 123 1 104 99 113 71 107 83 -405 123 1 94 77 84 58 87 73 -406 123 1 99 87 96 55 81 65 -407 123 1 125 115 134 71 111 93 -408 123 1 132 122 140 76 134 118 -409 123 1 109 99 114 60 110 101 -410 123 1 102 84 95 51 86 74 -411 123 1 107 90 102 55 100 88 -397 124 1 112 94 103 54 95 84 -398 124 1 107 91 102 62 101 88 -399 124 1 113 102 114 68 114 98 -400 124 1 120 116 129 70 124 109 -401 124 1 126 122 141 72 131 113 -402 124 1 128 121 142 72 125 105 -403 124 1 121 117 136 74 114 94 -404 124 1 99 87 95 67 101 78 -405 124 1 91 74 81 63 107 80 -406 124 1 100 87 99 59 101 79 -407 124 1 119 109 126 67 121 106 -408 124 1 122 116 133 72 134 120 -409 124 1 102 95 109 58 106 95 -410 124 1 107 94 105 54 102 94 
-399 125 1 106 102 118 68 116 102 -400 125 1 116 120 147 80 137 115 -401 125 1 126 123 147 78 131 108 -402 125 1 121 109 131 72 132 105 -403 125 1 121 117 136 74 114 94 -404 125 1 91 82 89 66 95 73 -405 125 1 81 68 67 61 96 71 -406 125 1 89 73 81 55 95 74 -407 125 1 105 94 107 60 113 100 -408 125 1 121 114 134 70 128 117 -409 125 1 110 100 119 61 107 95 -401 126 1 91 78 87 68 94 70 -402 126 1 89 75 76 66 97 74 -403 126 1 88 78 82 57 93 71 -404 126 1 93 82 86 60 93 76 -405 126 1 101 88 100 61 100 85 -406 126 1 110 98 115 62 113 98 -407 126 1 112 103 122 63 120 105 -408 126 1 111 98 115 59 113 98 -402 127 1 91 75 79 66 107 71 -403 127 1 95 84 89 71 113 90 -404 127 1 96 86 89 81 123 96 -405 127 1 114 102 116 68 126 111 -406 127 1 117 102 120 58 110 108 -407 127 1 120 107 126 64 117 103 -408 127 1 142 129 151 76 129 112 -404 128 1 88 77 79 86 114 80 -405 128 1 107 94 104 71 109 94 -406 128 1 106 89 100 55 94 92 -407 128 1 115 102 117 69 134 119 -406 129 1 92 76 77 66 108 81 -352 134 3 79 66 71 65 122 77 -353 134 3 82 67 77 73 131 83 -354 134 3 79 65 73 80 127 77 -355 134 3 77 65 64 80 109 63 -352 135 3 78 65 72 66 124 77 -353 135 3 81 68 78 77 137 84 -354 135 3 78 66 70 82 119 71 -355 135 3 71 57 50 76 79 45 -356 135 3 68 52 46 69 62 36 -357 135 3 74 59 56 72 85 50 -358 135 3 78 68 69 79 120 69 -359 135 3 83 69 74 76 124 73 -360 135 3 81 70 74 82 123 75 -352 136 3 81 67 78 78 138 84 -353 136 3 81 69 74 83 135 83 -354 136 3 75 63 56 82 99 58 -355 136 3 69 55 46 77 69 36 -356 136 3 68 52 43 73 56 30 -357 136 3 72 56 49 74 79 45 -358 136 3 78 68 69 79 120 69 -359 136 3 81 71 75 81 129 76 -360 136 3 85 75 78 82 123 73 -361 136 3 84 81 90 83 113 75 -362 136 3 84 83 98 88 126 89 -363 136 3 81 77 94 90 127 80 -352 137 3 81 68 72 87 121 75 -353 137 3 76 63 60 78 100 60 -354 137 3 71 56 46 76 75 40 -355 137 3 70 55 46 77 67 33 -356 137 3 67 54 45 75 62 34 -357 137 3 71 58 49 75 67 37 -358 137 3 78 68 69 80 124 73 -359 137 3 82 72 74 78 134 82 -360 137 3 88 81 90 77 137 93 -361 
137 3 91 86 101 80 136 99 -362 137 3 83 76 85 83 122 85 -363 137 3 85 83 100 85 129 89 -352 138 3 72 58 52 79 81 47 -353 138 3 72 57 48 76 81 45 -354 138 3 72 55 47 80 74 38 -355 138 3 72 54 47 78 63 33 -356 138 3 69 53 44 77 56 28 -357 138 3 69 56 48 79 59 32 -358 138 3 76 66 59 80 99 55 -359 138 3 80 70 73 79 128 77 -360 138 3 84 76 86 80 126 87 -361 138 3 87 81 92 86 132 90 -362 138 3 83 75 82 80 134 84 -363 138 3 85 79 90 86 128 85 -352 139 3 73 57 50 78 73 41 -353 139 3 72 57 49 79 81 41 -354 139 3 71 57 50 79 75 37 -355 139 3 70 56 46 78 62 35 -356 139 3 69 58 47 80 61 35 -357 139 3 74 62 60 83 82 44 -358 139 3 76 64 59 83 87 50 -359 139 3 76 68 66 94 112 60 -360 139 3 77 69 60 96 106 64 -361 139 3 76 69 58 107 102 55 -362 139 3 78 70 68 98 120 70 -363 139 3 89 84 95 75 143 99 -352 140 3 69 55 51 77 79 42 -353 140 3 71 56 48 76 65 35 -354 140 3 73 55 48 79 62 34 -355 140 3 69 58 52 85 81 45 -356 140 3 78 67 69 88 123 72 -357 140 3 83 70 76 82 139 83 -358 140 3 76 68 66 90 111 61 -359 140 3 81 71 67 91 117 67 -360 140 3 80 69 69 100 111 59 -361 140 3 79 71 66 107 103 59 -362 140 3 85 74 75 85 128 79 -363 140 3 80 72 72 73 140 89 -352 141 3 73 58 55 81 92 50 -353 141 3 71 58 50 85 81 44 -354 141 3 73 60 50 91 77 40 -355 141 3 75 64 57 93 97 54 -356 141 3 81 71 73 85 137 80 -357 141 3 85 71 73 80 142 84 -358 141 3 78 68 65 90 113 66 -359 141 3 79 67 66 95 114 62 -360 141 3 85 72 74 93 133 77 -361 141 3 77 70 63 109 106 58 -362 141 3 80 73 70 89 119 74 -363 141 3 85 74 75 85 128 79 -352 142 3 74 63 56 96 104 56 -353 142 3 71 60 48 110 97 49 -354 142 3 70 59 47 111 88 41 -355 142 3 72 62 53 99 97 50 -356 142 3 79 68 64 89 122 71 -357 142 3 82 71 77 81 139 83 -358 142 3 78 68 67 86 123 75 -359 142 3 79 67 63 95 110 57 -360 142 3 85 72 76 91 132 77 -361 142 3 79 69 59 108 104 55 -362 142 3 82 76 75 95 119 72 -363 142 3 89 80 100 67 117 83 -352 143 3 76 64 61 91 105 58 -353 143 3 77 62 55 112 102 53 -354 143 3 73 60 47 117 91 42 -355 143 3 70 60 47 110 88 40 -356 143 
3 76 64 58 101 100 55 -357 143 3 78 66 61 86 115 66 -358 143 3 82 70 73 81 121 75 -359 143 3 76 64 63 86 100 56 -360 143 3 81 71 69 90 116 67 -361 143 3 77 67 60 107 95 47 -362 143 3 82 74 69 102 109 62 -363 143 3 106 105 128 84 158 106 -352 144 3 77 66 65 77 114 67 -353 144 3 78 66 64 79 109 61 -354 144 3 76 61 56 91 97 56 -355 144 3 74 62 51 109 86 49 -356 144 3 77 64 58 98 78 46 -357 144 3 79 65 59 76 81 51 -358 144 3 78 61 62 63 106 66 -359 144 3 78 62 58 65 91 58 -360 144 3 81 72 72 81 104 65 -361 144 3 83 71 65 98 100 57 -362 144 3 80 73 66 99 112 65 -363 144 3 93 89 100 85 149 95 -352 145 3 77 67 67 79 113 69 -353 145 3 74 65 63 83 104 61 -354 145 3 79 65 64 89 102 60 -355 145 3 77 64 57 92 89 55 -356 145 3 79 64 59 64 63 47 -357 145 3 89 73 71 50 63 55 -358 145 3 78 62 63 65 96 63 -359 145 3 79 67 70 71 109 71 -360 145 3 87 74 76 87 120 75 -361 145 3 81 72 73 99 117 71 -362 145 3 84 76 76 87 137 88 -363 145 3 86 78 82 83 128 78 -352 146 3 85 73 73 72 100 67 -353 146 3 81 68 68 87 109 63 -354 146 3 78 67 62 87 95 49 -355 146 3 81 63 57 50 60 37 -356 146 3 91 71 66 39 57 48 -357 146 3 89 73 75 54 78 62 -358 146 3 79 66 68 62 108 71 -359 146 3 81 72 77 86 131 82 -360 146 3 81 72 77 86 131 82 -361 146 3 79 71 70 97 131 75 -362 146 3 85 74 84 77 145 94 -363 146 3 86 79 86 78 137 84 -352 147 3 86 73 76 90 100 70 -353 147 3 77 65 60 83 96 60 -354 147 3 76 65 57 67 85 51 -355 147 3 77 61 52 70 71 40 -356 147 3 80 68 60 66 74 48 -357 147 3 82 67 69 70 83 49 -358 147 3 79 66 63 68 88 61 -359 147 3 91 77 83 71 115 84 -360 147 3 87 77 79 85 122 85 -361 147 3 84 76 80 86 135 80 -362 147 3 84 74 83 74 139 88 -363 147 3 82 78 84 74 138 86 -352 148 3 88 75 76 92 99 57 -353 148 3 85 69 64 63 70 43 -354 148 3 81 65 61 58 68 47 -355 148 3 72 59 52 75 82 46 -356 148 3 74 62 54 65 81 50 -357 148 3 82 67 69 70 83 49 -358 148 3 86 70 67 66 85 62 -359 148 3 108 90 99 60 100 87 -360 148 3 110 93 103 66 101 88 -361 148 3 86 76 75 98 108 63 -362 148 3 84 73 73 92 130 79 -363 148 3 85 
75 85 74 139 86 -352 149 3 100 84 84 68 97 79 -353 149 3 91 73 70 58 70 55 -354 149 3 80 63 60 69 82 55 -355 149 3 70 56 50 66 84 49 -356 149 3 73 59 52 61 81 48 -357 149 3 80 64 64 67 89 60 -358 149 3 82 66 66 55 86 63 -359 149 3 93 79 81 52 89 71 -360 149 3 107 87 93 64 93 73 -361 149 3 79 67 60 94 85 44 -362 149 3 76 67 61 101 110 61 -363 149 3 84 71 81 81 134 85 -264 169 5 71 50 43 49 65 40 -265 169 5 69 53 44 53 68 39 -266 169 5 69 50 46 57 72 39 -267 169 5 68 50 44 56 54 32 -268 169 5 70 52 48 51 57 35 -269 169 5 76 63 56 65 78 49 -270 169 5 75 62 61 67 98 59 -271 169 5 74 57 56 67 87 49 -272 169 5 72 55 50 60 66 38 -273 169 5 74 58 58 64 98 58 -274 169 5 74 59 59 63 100 63 -102 170 1 82 64 57 75 75 48 -103 170 1 87 72 74 64 90 68 -104 170 1 104 89 96 54 92 80 -264 170 5 70 50 41 48 56 35 -265 170 5 68 51 43 54 64 37 -266 170 5 68 56 49 56 70 40 -267 170 5 68 50 43 50 52 35 -268 170 5 70 53 46 57 64 38 -269 170 5 70 56 52 60 78 46 -270 170 5 71 52 48 58 81 48 -271 170 5 73 54 48 58 72 41 -272 170 5 70 55 57 61 89 54 -273 170 5 78 58 65 61 104 64 -274 170 5 74 60 64 60 100 63 -100 171 1 97 79 78 60 71 50 -101 171 1 95 76 77 59 76 54 -102 171 1 112 96 103 69 97 80 -103 171 1 99 82 87 58 94 75 -104 171 1 103 89 100 61 94 78 -264 171 5 68 50 44 53 62 34 -265 171 5 68 53 50 62 75 41 -266 171 5 71 56 53 63 76 45 -267 171 5 69 52 48 55 60 37 -268 171 5 68 49 41 56 48 25 -269 171 5 68 52 45 62 58 31 -270 171 5 69 50 44 59 62 35 -271 171 5 72 53 49 61 72 39 -272 171 5 73 56 55 60 90 55 -273 171 5 76 58 61 59 99 62 -274 171 5 70 56 59 58 93 54 -294 171 6 74 59 61 63 110 70 -295 171 6 73 58 59 64 105 65 -296 171 6 75 61 60 66 116 71 -297 171 6 78 60 62 66 115 71 -298 171 6 75 59 56 64 103 63 -299 171 6 75 60 62 63 108 65 -300 171 6 75 59 63 66 101 61 -301 171 6 74 57 56 63 94 55 -98 172 1 99 80 86 57 93 68 -99 172 1 99 79 79 45 63 54 -100 172 1 100 75 78 37 57 46 -101 172 1 97 73 78 48 62 45 -102 172 1 111 100 108 68 101 88 -103 172 1 111 97 107 65 98 86 -104 172 1 100 
87 96 63 94 81 -264 172 5 69 53 52 60 77 44 -265 172 5 74 59 63 69 94 56 -266 172 5 71 56 53 63 76 45 -267 172 5 73 58 54 59 76 49 -268 172 5 68 50 40 54 49 27 -269 172 5 67 48 42 61 59 33 -270 172 5 66 51 45 60 62 33 -271 172 5 70 54 50 64 70 37 -272 172 5 72 56 55 62 84 51 -273 172 5 74 58 64 60 98 60 -274 172 5 72 58 60 60 93 54 -293 172 6 75 62 62 64 115 72 -294 172 6 74 59 58 62 115 73 -295 172 6 75 58 59 61 107 65 -296 172 6 76 58 62 62 115 71 -297 172 6 73 59 63 60 111 72 -298 172 6 75 61 65 59 117 73 -299 172 6 77 59 62 64 113 70 -300 172 6 75 58 63 62 108 68 -301 172 6 81 64 71 58 133 92 -96 173 1 114 105 122 76 120 93 -97 173 1 134 129 148 80 132 107 -98 173 1 131 114 139 71 119 98 -99 173 1 116 103 116 61 94 80 -100 173 1 98 78 84 42 65 54 -101 173 1 95 71 72 39 59 51 -102 173 1 111 100 108 68 101 88 -103 173 1 109 97 108 65 99 91 -104 173 1 98 82 88 57 96 84 -264 173 5 73 57 54 65 86 53 -265 173 5 77 63 69 72 109 66 -266 173 5 73 58 58 66 88 55 -267 173 5 76 57 55 59 81 54 -268 173 5 68 53 44 52 62 36 -269 173 5 66 49 42 60 60 35 -270 173 5 68 51 45 59 62 32 -271 173 5 69 55 52 60 77 43 -272 173 5 73 57 58 63 95 56 -273 173 5 75 60 63 63 105 62 -274 173 5 74 61 65 63 112 69 -293 173 6 74 62 63 65 117 70 -294 173 6 75 60 65 64 122 79 -295 173 6 75 58 62 60 110 74 -296 173 6 76 59 63 60 115 71 -297 173 6 77 64 69 62 125 82 -298 173 6 75 63 69 60 114 75 -299 173 6 79 65 73 63 126 84 -300 173 6 77 66 74 61 136 93 -301 173 6 77 65 79 57 141 102 -94 174 1 135 119 140 70 182 189 -95 174 1 113 95 110 62 116 108 -96 174 1 124 119 136 78 146 115 -97 174 1 149 154 185 103 187 154 -98 174 1 155 161 203 110 186 156 -99 174 1 149 152 189 104 178 150 -100 174 1 111 102 113 69 100 79 -101 174 1 95 76 79 50 71 54 -102 174 1 95 81 87 58 90 64 -103 174 1 107 94 107 66 105 89 -104 174 1 109 93 107 59 89 82 -105 174 1 104 90 99 56 90 79 -264 174 5 75 58 58 64 85 53 -265 174 5 76 61 63 66 95 59 -266 174 5 77 63 68 67 109 68 -267 174 5 75 57 60 57 89 57 -268 174 5 71 56 53 54 
76 46 -269 174 5 69 52 48 61 75 46 -270 174 5 70 53 44 59 65 35 -271 174 5 69 54 52 59 68 39 -272 174 5 73 56 57 61 91 53 -273 174 5 74 60 64 63 102 60 -274 174 5 76 61 68 63 118 74 -293 174 6 75 61 61 66 113 68 -294 174 6 74 60 61 63 112 70 -295 174 6 75 57 64 61 115 78 -296 174 6 77 63 70 62 126 83 -297 174 6 78 64 70 60 130 87 -298 174 6 79 66 73 59 111 75 -299 174 6 78 65 73 59 111 77 -300 174 6 77 69 79 62 139 98 -301 174 6 81 70 81 60 152 107 -92 175 1 96 80 86 40 65 61 -93 175 1 101 82 88 57 102 92 -94 175 1 103 87 94 60 119 111 -95 175 1 109 95 101 60 102 91 -96 175 1 115 105 119 72 121 105 -97 175 1 139 136 164 94 166 133 -98 175 1 147 155 191 104 207 163 -99 175 1 143 146 179 100 196 163 -100 175 1 110 100 112 75 105 79 -101 175 1 97 84 93 62 90 70 -102 175 1 100 83 91 58 94 77 -103 175 1 104 88 98 58 102 90 -104 175 1 113 98 111 59 109 101 -105 175 1 115 101 113 58 110 102 -264 175 5 76 58 62 62 95 60 -265 175 5 74 59 64 66 114 69 -266 175 5 79 65 74 67 117 76 -267 175 5 79 64 69 58 103 68 -268 175 5 76 60 64 56 84 52 -269 175 5 70 53 51 58 87 51 -270 175 5 69 55 45 60 73 42 -271 175 5 68 55 46 62 69 41 -272 175 5 73 56 57 61 91 53 -273 175 5 74 57 64 58 101 62 -274 175 5 78 59 65 61 114 74 -293 175 6 76 60 66 63 117 74 -294 175 6 76 60 64 63 115 73 -295 175 6 78 64 72 65 134 87 -296 175 6 78 64 73 64 134 88 -297 175 6 77 62 67 61 121 78 -298 175 6 78 61 67 55 114 77 -299 175 6 79 65 72 56 113 80 -300 175 6 80 69 81 60 138 101 -301 175 6 81 69 83 61 148 107 -91 176 1 94 73 80 46 67 60 -92 176 1 92 69 73 30 38 37 -93 176 1 91 74 73 39 51 45 -94 176 1 101 84 85 48 61 50 -95 176 1 103 88 96 58 98 82 -96 176 1 108 90 101 56 102 100 -97 176 1 112 98 108 62 114 100 -98 176 1 120 116 129 80 151 122 -99 176 1 128 120 141 82 154 117 -100 176 1 102 85 99 62 105 77 -101 176 1 100 90 95 61 92 75 -102 176 1 104 90 100 61 95 82 -103 176 1 89 72 73 42 59 54 -104 176 1 95 77 78 44 64 62 -105 176 1 115 102 109 59 108 104 -264 176 5 74 61 64 65 108 67 -265 176 5 73 58 61 
64 88 53 -266 176 5 81 65 76 63 123 78 -267 176 5 81 64 71 57 90 64 -268 176 5 74 58 57 57 83 51 -269 176 5 70 56 50 60 71 42 -270 176 5 69 55 48 63 67 37 -271 176 5 71 54 47 59 71 45 -272 176 5 75 57 61 57 89 53 -273 176 5 77 58 63 62 107 65 -274 176 5 76 62 68 62 114 72 -293 176 6 76 59 63 64 124 80 -294 176 6 77 59 64 67 130 83 -295 176 6 80 66 73 69 143 93 -296 176 6 76 66 74 67 137 88 -297 176 6 78 63 69 65 123 78 -298 176 6 79 64 69 60 116 77 -299 176 6 78 65 73 59 122 85 -300 176 6 80 68 80 60 139 99 -301 176 6 84 71 84 61 143 103 -91 177 1 104 82 91 45 61 54 -92 177 1 93 68 67 33 37 37 -93 177 1 94 75 76 36 42 36 -94 177 1 102 83 84 45 69 60 -95 177 1 102 82 92 50 80 71 -96 177 1 107 89 99 59 98 86 -97 177 1 104 89 100 64 89 77 -98 177 1 110 97 106 71 110 85 -99 177 1 101 88 100 60 99 79 -100 177 1 96 82 90 52 72 63 -101 177 1 107 89 100 53 85 82 -102 177 1 105 87 95 51 82 73 -103 177 1 105 87 95 51 82 73 -264 177 5 74 58 59 58 97 58 -265 177 5 72 56 58 59 97 56 -266 177 5 79 63 71 61 113 75 -267 177 5 83 67 69 58 91 66 -268 177 5 81 62 67 59 94 63 -269 177 5 72 57 60 59 92 57 -270 177 5 72 56 57 59 81 47 -271 177 5 71 54 48 59 77 44 -272 177 5 72 57 59 61 86 50 -273 177 5 74 56 61 61 107 63 -274 177 5 75 59 65 59 106 70 -293 177 6 77 59 62 64 113 73 -294 177 6 76 64 68 67 123 78 -295 177 6 80 67 71 68 125 81 -296 177 6 79 66 77 67 133 89 -297 177 6 78 63 69 65 123 78 -298 177 6 78 62 65 63 124 81 -299 177 6 76 60 64 60 119 80 -300 177 6 81 66 78 61 138 96 -301 177 6 84 74 88 62 148 105 -92 178 1 102 86 90 48 65 59 -93 178 1 99 76 80 39 54 49 -94 178 1 96 72 74 36 44 42 -95 178 1 92 73 74 43 49 44 -96 178 1 100 86 94 58 91 75 -97 178 1 105 85 94 58 89 76 -98 178 1 101 91 99 67 102 87 -99 178 1 99 82 81 55 73 61 -100 178 1 136 134 152 75 119 113 -101 178 1 138 124 142 67 106 99 -102 178 1 108 94 101 59 118 103 -264 178 5 74 58 62 57 99 62 -265 178 5 74 57 62 59 96 60 -266 178 5 76 61 63 58 82 53 -267 178 5 80 65 66 59 97 62 -268 178 5 77 58 64 61 90 59 -269 
178 5 73 60 64 62 100 60 -270 178 5 72 58 57 55 81 49 -271 178 5 69 51 45 57 66 38 -272 178 5 69 55 49 62 78 49 -273 178 5 71 55 54 60 88 53 -274 178 5 76 59 62 63 93 55 -293 178 6 76 65 69 57 111 75 -294 178 6 80 66 73 59 122 81 -295 178 6 83 68 78 59 126 86 -296 178 6 82 69 79 59 128 88 -297 178 6 78 63 69 64 129 86 -298 178 6 78 61 65 61 122 83 -299 178 6 80 69 79 61 139 97 -300 178 6 80 69 79 61 139 97 -301 178 6 81 72 85 63 138 102 -92 179 1 111 91 102 56 91 79 -93 179 1 107 87 96 52 82 74 -94 179 1 102 83 90 45 65 59 -95 179 1 99 79 83 45 59 51 -96 179 1 99 85 85 62 75 66 -97 179 1 101 84 92 65 92 76 -98 179 1 105 93 97 58 85 72 -99 179 1 128 119 129 68 108 98 -100 179 1 163 168 194 89 145 144 -93 180 1 109 93 104 57 91 80 -94 180 1 108 90 100 58 94 81 -95 180 1 104 90 97 63 88 75 -96 180 1 101 84 86 63 80 71 -97 180 1 102 84 92 68 90 73 -98 180 1 106 95 101 55 84 76 -93 181 1 114 95 110 50 98 87 -94 181 1 110 91 99 60 98 84 -95 181 1 100 84 92 69 94 75 -96 181 1 104 89 97 66 89 79 -94 182 1 99 81 85 70 93 76 -192 188 6 68 48 36 15 15 13 -193 188 6 66 46 37 16 15 11 -194 188 6 66 46 35 14 13 13 -195 188 6 66 47 36 15 14 12 -196 188 6 66 46 36 14 13 11 -197 188 6 68 45 36 15 14 14 -198 188 6 64 45 35 14 12 13 -199 188 6 66 47 35 13 12 10 -200 188 6 65 47 35 13 13 11 -201 188 6 66 45 35 15 13 13 -202 188 6 64 44 34 15 12 12 -203 188 6 66 46 34 15 14 11 -204 188 6 65 46 35 15 13 13 -192 189 6 66 46 36 15 14 12 -193 189 6 68 46 36 15 14 11 -194 189 6 67 45 36 15 15 13 -195 189 6 65 47 36 15 14 11 -196 189 6 67 44 37 15 15 12 -197 189 6 67 47 37 15 13 12 -198 189 6 67 45 37 15 13 13 -199 189 6 64 46 33 15 13 12 -200 189 6 65 46 35 15 14 11 -201 189 6 66 44 36 15 14 12 -202 189 6 66 44 34 15 13 9 -203 189 6 66 46 36 15 12 10 -204 189 6 66 46 36 15 13 13 -193 190 6 69 46 36 14 13 12 -194 190 6 67 46 34 16 14 12 -195 190 6 67 46 36 15 14 14 -196 190 6 65 45 34 15 14 11 -197 190 6 66 48 39 16 14 10 -198 190 6 67 47 36 15 15 11 -199 190 6 64 45 35 15 14 14 -200 190 6 
65 44 34 15 14 11 -201 190 6 66 45 36 15 13 13 -202 190 6 66 46 35 15 12 10 -203 190 6 64 46 36 14 13 11 -204 190 6 66 46 33 15 14 12 -193 191 6 68 47 36 15 15 11 -194 191 6 67 47 35 15 14 13 -195 191 6 68 46 37 14 13 11 -196 191 6 65 46 33 14 15 13 -197 191 6 66 46 39 15 14 12 -198 191 6 66 47 34 16 14 11 -199 191 6 66 45 36 15 13 12 -200 191 6 66 45 35 14 13 13 -201 191 6 66 46 35 14 12 11 -202 191 6 65 46 37 14 13 12 -203 191 6 65 46 34 15 15 11 -204 191 6 66 46 31 15 14 11 -193 192 6 66 47 36 15 14 12 -194 192 6 68 46 36 14 14 12 -195 192 6 68 46 37 14 13 11 -196 192 6 66 46 36 15 15 14 -197 192 6 68 48 39 15 13 12 -198 192 6 66 45 34 15 14 11 -199 192 6 67 46 34 15 13 12 -200 192 6 66 45 36 13 14 13 -201 192 6 68 45 35 14 12 10 -202 192 6 66 45 37 15 13 12 -203 192 6 66 45 33 15 14 12 -204 192 6 67 45 33 15 13 13 -194 193 6 66 46 35 15 14 13 -195 193 6 65 46 37 16 14 13 -196 193 6 67 47 37 14 15 11 -197 193 6 67 45 36 14 13 11 -198 193 6 64 47 33 15 14 14 -199 193 6 64 47 33 15 14 14 -200 193 6 67 44 37 14 15 11 -201 193 6 66 45 35 15 12 12 -202 193 6 66 46 35 14 12 13 -203 193 6 65 45 35 14 13 13 -204 193 6 66 45 35 15 14 12 -194 194 6 67 48 34 14 14 11 -195 194 6 67 45 36 16 13 11 -196 194 6 66 45 34 15 16 12 -197 194 6 67 47 37 15 13 12 -198 194 6 65 45 35 15 14 13 -199 194 6 65 45 34 15 14 11 -200 194 6 66 46 35 15 13 13 -201 194 6 67 44 36 15 14 14 -202 194 6 67 46 34 14 13 11 -203 194 6 66 46 35 15 13 12 -204 194 6 65 45 34 15 14 11 -194 195 6 67 46 35 15 15 12 -195 195 6 66 46 38 15 13 10 -196 195 6 68 47 34 15 15 12 -197 195 6 66 46 36 14 13 12 -198 195 6 65 46 33 15 15 10 -199 195 6 66 45 36 15 12 11 -200 195 6 66 45 33 14 14 15 -201 195 6 65 45 36 14 13 12 -202 195 6 66 45 35 15 14 10 -203 195 6 65 44 35 15 14 12 -204 195 6 66 47 36 14 14 13 -195 196 6 65 44 35 16 15 13 -196 196 6 67 45 37 15 14 13 -197 196 6 67 46 35 15 14 12 -198 196 6 66 46 34 15 15 11 -199 196 6 65 47 36 15 14 12 -200 196 6 67 45 35 15 14 13 -201 196 6 65 45 36 14 13 12 -202 196 
6 64 45 35 15 15 11 -203 196 6 66 46 34 15 15 13 -294 227 4 86 73 78 55 73 53 -295 227 4 83 69 71 64 83 60 -296 227 4 86 71 70 69 85 62 -297 227 4 85 70 70 66 90 67 -298 227 4 89 74 77 64 80 60 -289 228 4 80 69 74 70 107 63 -290 228 4 80 71 79 64 107 73 -291 228 4 88 74 84 61 102 78 -292 228 4 95 82 93 56 90 70 -293 228 4 89 76 85 64 89 62 -294 228 4 83 70 71 73 90 61 -295 228 4 83 70 71 73 90 61 -296 228 4 86 71 73 73 90 62 -297 228 4 88 73 78 73 99 72 -298 228 4 92 78 83 78 102 75 -290 229 4 84 71 76 59 76 54 -291 229 4 87 75 84 62 102 69 -292 229 4 94 82 92 57 98 82 -293 229 4 92 82 93 64 102 76 -294 229 4 90 80 88 68 98 69 -295 229 4 93 81 88 68 98 72 -296 229 4 93 79 88 72 107 77 -297 229 4 98 84 96 66 110 84 -298 229 4 94 81 89 58 98 79 -290 230 4 79 63 66 62 83 60 -291 230 4 76 63 66 74 101 61 -292 230 4 89 74 81 61 98 73 -293 230 4 82 69 74 57 84 65 -294 230 4 85 70 75 52 77 58 -295 230 4 91 71 71 48 69 55 -296 230 4 86 68 69 53 81 58 -297 230 4 96 82 93 61 102 78 -298 230 4 100 90 102 62 95 80 -290 231 4 86 71 80 61 115 79 -291 231 4 75 60 60 74 89 53 -292 231 4 83 67 67 65 88 56 -133 244 4 69 56 44 71 72 38 -134 244 4 68 54 43 73 71 38 -135 244 4 73 59 53 80 82 45 -136 244 4 73 59 56 84 85 45 -137 244 4 69 61 50 83 92 48 -138 244 4 87 76 77 91 109 70 -139 244 4 93 82 79 113 113 72 -140 244 4 75 59 60 84 90 47 -133 245 4 73 60 53 77 93 51 -134 245 4 72 59 52 84 90 47 -135 245 4 76 66 58 87 104 58 -136 245 4 76 66 67 88 109 63 -137 245 4 80 67 67 82 116 67 -138 245 4 77 68 63 90 114 66 -139 245 4 88 81 73 107 115 79 -140 245 4 75 64 56 94 92 49 -133 246 4 73 60 53 77 93 51 -134 246 4 77 64 60 85 106 60 -135 246 4 77 68 66 85 112 65 -136 246 4 79 68 70 87 119 65 -137 246 4 81 72 74 85 128 77 -138 246 4 77 65 65 86 104 55 -139 246 4 81 68 64 81 83 51 -140 246 4 81 66 61 100 75 43 -134 247 4 80 71 73 83 123 73 -135 247 4 78 67 67 78 112 64 -136 247 4 77 64 62 86 108 59 -137 247 4 79 70 70 89 122 70 -138 247 4 80 65 66 80 114 68 -139 247 4 79 64 61 79 81 50 
-140 247 4 75 64 59 90 79 43 -141 247 4 70 58 47 101 78 37 -250 247 4 69 55 48 63 68 38 -251 247 4 77 67 71 70 98 61 -134 248 4 79 68 68 79 118 70 -135 248 4 78 62 66 75 106 64 -136 248 4 86 74 78 74 110 72 -137 248 4 87 74 79 69 110 71 -138 248 4 82 71 71 76 106 69 -139 248 4 81 70 67 92 91 50 -140 248 4 81 72 67 91 100 56 -141 248 4 80 67 62 87 89 55 -250 248 4 80 63 60 64 81 52 -251 248 4 87 76 78 72 105 76 -252 248 4 85 76 75 89 109 77 -253 248 4 88 78 79 83 116 78 -254 248 4 89 80 89 73 103 77 -134 249 4 88 76 79 66 110 74 -135 249 4 84 66 74 64 96 68 -136 249 4 83 75 75 78 104 72 -137 249 4 86 74 78 77 90 63 -138 249 4 81 73 72 69 98 62 -139 249 4 80 70 67 91 108 68 -140 249 4 90 73 75 88 105 67 -141 249 4 86 69 65 77 84 50 -251 249 4 84 73 71 95 93 61 -252 249 4 76 61 49 107 84 41 -253 249 4 85 75 74 97 125 74 -254 249 4 83 78 85 87 133 83 -251 250 4 90 81 88 85 126 85 -252 250 4 82 70 65 90 77 47 -253 250 4 90 78 79 84 108 71 -254 250 4 85 79 84 90 134 86 -252 251 4 86 80 82 86 116 82 -253 251 4 84 78 77 98 112 73 -254 251 4 83 75 76 111 121 69 -378 253 6 74 59 55 60 100 61 -377 254 6 75 61 62 68 108 61 -378 254 6 74 59 57 49 71 48 -379 254 6 69 54 45 14 11 12 -376 255 6 70 55 49 66 78 44 -377 255 6 75 61 62 68 108 61 -378 255 6 74 60 57 42 82 53 -379 255 6 72 54 46 16 18 16 -376 256 6 76 61 59 68 106 62 -377 256 6 74 59 52 30 53 36 -378 256 6 69 54 44 14 6 7 -376 257 6 76 61 59 65 93 56 -377 257 6 73 57 52 23 44 30 -132 274 4 72 58 59 62 94 59 -132 275 4 74 58 58 64 97 58 -133 275 4 74 63 65 72 114 64 -134 275 4 74 63 65 72 114 64 -135 275 4 75 58 60 59 82 52 -136 275 4 73 55 51 54 49 31 -137 275 4 64 43 34 23 16 14 -132 276 4 72 57 54 60 97 56 -133 276 4 75 65 68 77 123 70 -134 276 4 73 53 55 49 61 37 -135 276 4 61 40 30 12 10 11 -136 276 4 64 42 34 19 24 20 -137 276 4 68 47 42 42 63 37 -132 277 4 71 55 51 61 91 52 -133 277 4 74 63 61 80 114 63 -134 277 4 74 59 55 67 74 41 -135 277 4 69 50 40 40 35 24 -136 277 4 64 44 35 18 41 30 -137 277 4 71 54 52 53 96 
52 -132 278 4 71 56 50 65 75 44 -133 278 4 73 60 58 75 99 54 -134 278 4 79 64 64 79 122 69 -135 278 4 77 63 59 75 109 61 -136 278 4 70 54 46 43 57 37 -137 278 4 76 62 62 69 95 55 -132 279 4 73 58 54 63 85 52 -133 279 4 73 57 56 66 94 55 -134 279 4 76 61 61 71 104 62 -135 279 4 77 63 67 73 109 62 -136 279 4 76 62 60 68 106 63 -137 279 4 75 63 59 82 116 66 -360 290 5 68 48 46 67 109 60 -361 290 5 73 56 59 64 129 79 -362 290 5 78 60 70 66 130 82 -363 290 5 78 62 65 64 120 71 -364 290 5 76 60 61 66 110 68 -365 290 5 75 57 61 64 116 70 -366 290 5 76 63 67 63 124 75 -367 290 5 78 60 63 59 118 74 -368 290 5 76 59 68 62 125 77 -369 290 5 76 60 64 63 117 73 -370 290 5 75 60 63 66 121 75 -371 290 5 72 58 57 68 105 64 -360 291 5 85 72 82 71 127 86 -361 291 5 88 75 84 69 130 87 -362 291 5 78 62 72 65 137 86 -363 291 5 80 64 68 64 124 78 -364 291 5 72 57 58 67 110 65 -365 291 5 77 60 68 63 124 78 -366 291 5 77 60 65 60 125 80 -367 291 5 78 63 66 61 123 74 -368 291 5 75 60 64 62 127 77 -369 291 5 75 62 67 61 126 79 -370 291 5 74 60 61 68 117 70 -371 291 5 72 56 50 73 96 52 -361 292 5 80 62 70 64 127 82 -362 292 5 77 62 66 64 119 75 -363 292 5 78 61 67 63 121 75 -364 292 5 74 59 65 63 117 73 -365 292 5 78 62 69 63 123 77 -366 292 5 76 60 65 62 122 76 -367 292 5 78 62 70 63 132 79 -368 292 5 75 60 69 64 127 79 -369 292 5 77 63 71 61 130 85 -370 292 5 74 59 56 64 102 61 -371 292 5 70 53 47 66 90 45 -361 293 5 73 58 62 64 119 82 -362 293 5 74 60 64 65 119 74 -363 293 5 77 59 65 63 124 75 -364 293 5 79 62 66 63 123 77 -365 293 5 77 60 68 64 125 80 -366 293 5 77 62 70 63 130 80 -367 293 5 80 62 75 63 133 85 -368 293 5 77 61 70 63 128 82 -369 293 5 74 55 54 59 90 54 -370 293 5 73 57 52 60 89 51 -371 293 5 76 57 62 63 120 73 -361 294 5 69 54 53 61 96 62 -362 294 5 76 60 63 67 114 69 -363 294 5 76 61 66 63 123 74 -364 294 5 79 62 70 65 131 81 -365 294 5 77 60 66 63 122 76 -366 294 5 75 61 66 62 124 76 -367 294 5 77 61 67 61 120 70 -368 294 5 77 59 66 63 116 71 -369 294 5 73 56 53 63 92 
51 -370 294 5 76 58 60 63 112 67 -371 294 5 79 60 68 63 129 80 -361 295 5 73 58 59 61 85 51 -362 295 5 74 58 58 70 103 58 -363 295 5 77 61 63 65 117 72 -364 295 5 79 63 72 64 132 83 -365 295 5 76 59 63 61 122 73 -366 295 5 73 58 60 60 109 65 -367 295 5 76 59 55 61 96 56 -368 295 5 76 56 58 60 98 60 -369 295 5 71 57 54 67 92 52 -370 295 5 74 56 60 64 108 63 -371 295 5 77 57 61 60 108 66 -361 296 5 76 58 63 69 104 60 -362 296 5 79 63 67 61 127 80 -363 296 5 75 58 67 59 122 78 -364 296 5 75 56 59 59 105 62 -365 296 5 73 57 55 61 88 51 -366 296 5 71 58 55 68 99 56 -367 296 5 74 56 53 61 87 51 -368 296 5 71 56 55 66 90 53 -369 296 5 73 55 54 64 92 53 -370 296 5 72 54 50 63 84 51 -371 296 5 70 57 49 68 89 50 -361 297 5 74 57 64 67 113 67 -362 297 5 76 63 68 61 126 82 -363 297 5 76 59 65 58 115 71 -364 297 5 74 59 59 60 99 61 -365 297 5 73 59 54 67 99 55 -366 297 5 72 56 54 72 107 57 -367 297 5 73 59 55 69 100 55 -368 297 5 73 59 54 67 97 56 -369 297 5 73 58 53 69 93 54 -370 297 5 73 56 52 72 85 48 -371 297 5 72 58 54 70 94 52 -361 298 5 78 61 68 60 116 74 -362 298 5 77 60 61 60 111 69 -363 298 5 74 59 62 64 110 64 -364 298 5 76 60 65 63 110 68 -365 298 5 71 58 54 65 106 62 -366 298 5 71 54 54 65 95 52 -367 298 5 71 55 51 71 101 55 -368 298 5 73 57 51 66 96 55 -369 298 5 72 57 53 67 98 57 -370 298 5 75 59 59 68 105 60 -371 298 5 76 60 64 63 115 69 -362 299 5 75 58 64 60 112 72 -363 299 5 75 57 59 62 112 67 -364 299 5 72 55 57 65 101 60 -365 299 5 71 56 51 66 91 52 -366 299 5 72 55 52 65 94 55 -367 299 5 71 57 50 65 95 53 -368 299 5 71 57 50 65 95 53 -369 299 5 72 55 56 66 102 59 -370 299 5 76 60 63 60 124 77 -371 299 5 81 65 70 53 130 85 -362 300 5 74 58 64 59 115 74 -363 300 5 74 57 59 60 113 70 -364 300 5 74 59 58 64 112 66 -365 300 5 74 59 61 65 106 63 -366 300 5 76 58 56 65 105 61 -367 300 5 72 56 54 66 103 60 -368 300 5 75 57 58 61 111 65 -369 300 5 76 59 65 55 117 79 -370 300 5 84 72 81 56 130 94 -371 300 5 85 72 82 59 128 96 -362 301 5 74 57 65 60 118 74 -363 301 5 
73 57 59 63 107 64 -364 301 5 76 58 60 64 118 71 -365 301 5 77 60 65 66 123 75 -366 301 5 73 56 57 62 106 63 -367 301 5 75 59 59 62 111 67 -368 301 5 71 56 57 58 104 63 -369 301 5 81 64 69 56 106 77 -370 301 5 89 75 84 57 118 95 -371 301 5 77 66 68 59 112 73 -362 302 5 75 58 62 62 116 70 -363 302 5 75 58 56 68 107 61 -364 302 5 76 60 65 67 118 72 -365 302 5 76 59 65 66 121 76 -366 302 5 73 56 57 62 106 63 -367 302 5 70 56 58 59 93 54 -368 302 5 75 63 62 57 102 69 -369 302 5 86 76 86 59 117 94 -370 302 5 83 70 80 59 113 84 -371 302 5 73 57 58 59 100 58 -363 303 5 72 56 52 68 99 54 -364 303 5 74 58 64 68 115 70 -365 303 5 74 61 59 66 109 66 -366 303 5 71 58 60 60 97 59 -367 303 5 69 59 59 57 92 53 -368 303 5 83 71 79 59 115 83 -369 303 5 87 74 84 61 124 94 -370 303 5 78 59 65 58 106 64 -371 303 5 73 54 56 57 92 58 -345 307 7 72 59 50 66 73 41 -346 307 7 83 71 70 70 88 58 -347 307 7 112 103 117 65 132 115 -348 307 7 125 118 135 65 138 133 -349 307 7 125 112 129 64 126 118 -350 307 7 121 108 122 59 124 117 -351 307 7 125 113 126 62 131 129 -345 308 7 72 60 47 69 82 46 -346 308 7 79 66 61 77 85 52 -347 308 7 116 105 115 61 112 102 -348 308 7 123 113 126 65 127 125 -349 308 7 125 113 126 63 116 114 -350 308 7 118 108 121 63 107 103 -351 308 7 132 125 148 71 130 124 -198 309 5 69 53 54 62 82 49 -199 309 5 72 54 57 56 94 58 -200 309 5 72 56 57 65 98 60 -201 309 5 74 59 60 70 107 65 -202 309 5 75 59 60 68 116 70 -203 309 5 71 59 54 69 112 68 -204 309 5 74 58 63 62 101 63 -205 309 5 77 59 68 63 111 68 -206 309 5 76 56 62 57 117 71 -207 309 5 73 58 59 61 118 71 -208 309 5 75 61 67 62 127 82 -209 309 5 80 65 70 63 122 84 -345 309 7 74 58 52 72 83 47 -346 309 7 92 79 84 64 95 72 -347 309 7 124 108 124 60 124 117 -348 309 7 138 123 135 64 142 137 -349 309 7 138 123 135 64 142 137 -350 309 7 146 135 148 71 145 139 -351 309 7 143 132 150 73 142 145 -198 310 5 68 51 50 60 73 42 -199 310 5 69 53 50 63 74 44 -200 310 5 71 56 53 66 86 49 -201 310 5 74 58 57 71 104 62 -202 310 5 74 57 
57 69 106 65 -203 310 5 71 57 53 66 96 55 -204 310 5 75 60 68 64 112 71 -205 310 5 77 63 68 63 120 75 -206 310 5 71 53 49 53 80 48 -207 310 5 71 54 54 61 94 57 -208 310 5 74 58 62 65 110 69 -209 310 5 78 60 66 64 109 70 -345 310 7 72 56 47 70 74 41 -346 310 7 79 68 68 73 96 58 -347 310 7 113 101 113 60 117 113 -348 310 7 149 137 151 70 165 159 -349 310 7 170 157 176 80 182 183 -350 310 7 144 132 151 72 150 162 -351 310 7 147 138 164 79 145 145 -199 311 5 71 55 56 63 85 49 -200 311 5 73 56 56 65 85 51 -201 311 5 74 58 58 70 100 60 -202 311 5 73 57 56 68 100 58 -203 311 5 73 59 59 71 102 56 -204 311 5 77 62 68 68 121 75 -205 311 5 75 55 57 53 98 62 -206 311 5 69 52 44 52 60 36 -207 311 5 71 53 49 61 76 43 -208 311 5 74 61 64 68 120 72 -209 311 5 76 62 65 66 109 67 -345 311 7 68 52 43 62 59 35 -346 311 7 72 58 53 64 75 43 -347 311 7 97 87 102 62 111 89 -348 311 7 132 125 148 71 151 143 -349 311 7 151 140 166 77 161 161 -350 311 7 139 132 155 75 139 138 -351 311 7 153 143 170 82 152 150 -199 312 5 71 56 56 58 91 54 -200 312 5 71 56 53 60 85 54 -201 312 5 71 55 53 64 92 53 -202 312 5 73 58 58 70 98 55 -203 312 5 73 57 56 70 109 62 -204 312 5 74 56 59 57 108 69 -205 312 5 71 53 51 50 72 44 -206 312 5 70 52 46 55 63 38 -207 312 5 71 53 47 58 73 43 -208 312 5 75 59 61 65 117 69 -209 312 5 75 62 67 65 125 78 -345 312 7 69 50 43 58 48 30 -346 312 7 72 55 45 64 56 32 -347 312 7 80 64 70 65 94 54 -348 312 7 102 95 111 69 122 103 -349 312 7 123 114 131 69 130 123 -350 312 7 129 117 130 63 119 113 -351 312 7 128 115 130 64 126 121 -199 313 5 71 57 51 64 79 47 -200 313 5 71 58 55 65 87 51 -201 313 5 75 60 60 68 99 58 -202 313 5 71 57 55 66 94 54 -203 313 5 70 56 56 64 99 58 -204 313 5 71 51 49 49 73 47 -205 313 5 70 52 48 58 65 37 -206 313 5 70 53 48 58 70 40 -207 313 5 67 52 46 54 62 38 -208 313 5 72 56 53 61 95 55 -209 313 5 74 59 60 66 119 71 -345 313 7 66 49 42 57 48 27 -346 313 7 72 55 47 69 72 41 -347 313 7 66 54 45 66 64 32 -348 313 7 82 71 79 71 89 59 -349 313 7 117 106 
114 64 111 108 -350 313 7 117 104 109 60 104 100 -351 313 7 104 89 95 58 84 70 -199 314 5 73 58 56 64 86 51 -200 314 5 74 59 59 61 96 61 -201 314 5 75 59 63 61 105 65 -202 314 5 72 56 53 54 82 48 -203 314 5 68 52 49 52 60 37 -204 314 5 69 54 46 59 64 37 -205 314 5 67 51 46 58 66 36 -206 314 5 69 51 44 58 62 35 -207 314 5 69 53 46 61 65 37 -208 314 5 72 57 54 62 94 53 -209 314 5 71 54 51 54 92 56 -199 315 5 72 59 59 62 98 59 -200 315 5 77 60 65 60 112 70 -201 315 5 77 59 69 58 111 69 -202 315 5 74 57 59 54 85 51 -203 315 5 73 54 54 56 80 48 -204 315 5 68 56 47 61 70 40 -205 315 5 67 53 44 62 64 35 -206 315 5 70 51 45 62 70 37 -207 315 5 70 50 45 62 69 38 -208 315 5 68 56 50 64 74 42 -209 315 5 71 53 49 60 74 44 -199 316 5 76 63 70 67 116 69 -200 316 5 80 63 76 61 126 76 -201 316 5 77 59 69 58 111 69 -202 316 5 77 61 70 60 112 70 -203 316 5 74 57 61 61 102 61 -204 316 5 69 54 48 62 79 44 -205 316 5 68 51 42 59 62 34 -206 316 5 67 52 43 61 60 33 -207 316 5 69 53 43 65 65 35 -208 316 5 69 52 47 63 66 37 -209 316 5 70 53 47 64 71 40 -199 317 5 79 64 75 68 130 83 -200 317 5 77 63 70 60 121 74 -201 317 5 77 62 72 61 118 73 -202 317 5 78 62 68 64 120 74 -203 317 5 73 58 62 63 105 63 -204 317 5 72 54 52 61 79 48 -205 317 5 69 50 43 59 62 35 -206 317 5 66 52 43 63 63 36 -207 317 5 67 52 43 65 68 38 -208 317 5 70 52 43 67 69 38 -209 317 5 69 54 44 67 71 38 -240 317 3 75 67 72 89 101 64 -241 317 3 77 70 78 89 109 67 -242 317 3 88 84 101 85 136 102 -243 317 3 90 81 89 94 133 105 -244 317 3 86 80 90 94 134 98 -245 317 3 78 74 74 106 118 77 -246 317 3 91 84 98 93 135 101 -247 317 3 86 80 89 89 120 85 -248 317 3 80 74 75 91 109 73 -274 317 3 73 63 56 105 100 51 -275 317 3 74 63 51 113 98 49 -276 317 3 74 63 50 117 100 49 -277 317 3 74 66 54 116 102 54 -278 317 3 75 66 53 110 103 51 -279 317 3 72 63 56 107 96 46 -280 317 3 74 64 56 110 98 49 -281 317 3 78 71 65 99 121 66 -282 317 3 80 71 66 99 122 69 -283 317 3 80 68 69 94 127 76 -284 317 3 80 73 69 103 123 68 -285 317 3 81 72 69 
94 126 73 -286 317 3 80 75 77 84 131 79 -287 317 3 81 68 70 86 113 71 -288 317 3 76 65 63 98 108 70 -289 317 3 88 75 83 94 138 90 -199 318 5 75 58 65 60 119 72 -200 318 5 73 59 61 60 107 69 -201 318 5 74 61 66 64 115 70 -202 318 5 75 58 62 59 107 64 -203 318 5 75 61 63 62 104 62 -204 318 5 75 58 58 60 96 57 -205 318 5 73 53 52 59 79 46 -206 318 5 71 54 52 65 77 43 -207 318 5 70 54 52 67 80 47 -208 318 5 68 52 42 71 75 44 -209 318 5 66 54 44 68 73 37 -240 318 3 83 77 87 90 121 82 -241 318 3 81 73 75 92 117 75 -242 318 3 79 72 73 96 108 67 -243 318 3 77 71 74 100 109 67 -244 318 3 78 71 79 96 128 85 -245 318 3 81 75 83 94 120 77 -246 318 3 83 77 80 94 115 75 -247 318 3 95 93 108 81 130 106 -248 318 3 85 80 84 92 124 91 -249 318 3 72 60 52 102 96 47 -274 318 3 72 63 53 110 99 50 -275 318 3 73 63 52 112 98 48 -276 318 3 74 63 54 114 103 53 -277 318 3 76 66 56 116 107 57 -278 318 3 75 67 55 109 108 59 -279 318 3 72 62 53 106 98 53 -280 318 3 74 64 55 115 96 45 -281 318 3 74 69 61 114 111 57 -282 318 3 77 68 57 110 107 59 -283 318 3 78 69 66 102 123 66 -284 318 3 78 71 64 98 120 67 -285 318 3 78 68 68 94 122 73 -286 318 3 79 70 64 98 118 65 -287 318 3 74 62 55 82 101 55 -288 318 3 105 96 110 79 139 116 -289 318 3 88 75 83 94 138 90 -240 319 3 91 89 103 82 137 108 -241 319 3 93 89 106 79 136 106 -242 319 3 91 84 100 85 128 98 -243 319 3 88 84 105 85 133 102 -244 319 3 90 86 111 81 147 110 -245 319 3 88 83 92 88 133 94 -246 319 3 94 91 105 83 136 109 -247 319 3 83 77 80 96 120 88 -248 319 3 71 62 51 101 103 61 -249 319 3 76 64 56 97 102 58 -275 319 3 73 61 50 108 100 52 -276 319 3 74 64 58 106 108 56 -277 319 3 76 64 55 111 112 55 -278 319 3 74 61 53 95 100 53 -279 319 3 70 57 47 80 84 47 -280 319 3 72 64 52 90 90 47 -281 319 3 74 66 58 104 106 53 -282 319 3 75 63 51 101 99 53 -283 319 3 77 67 58 110 105 51 -284 319 3 73 66 53 104 105 54 -285 319 3 73 63 58 107 96 49 -286 319 3 75 65 49 115 95 45 -287 319 3 69 57 39 101 83 34 -288 319 3 127 118 141 81 164 136 -289 319 3 
109 99 113 89 152 118 -240 320 3 78 69 65 95 108 67 -241 320 3 79 75 78 90 116 81 -242 320 3 85 77 88 91 123 90 -243 320 3 81 74 81 97 122 86 -244 320 3 74 66 64 108 114 76 -245 320 3 88 85 96 89 137 105 -246 320 3 77 68 65 103 109 74 -247 320 3 75 61 54 104 95 48 -248 320 3 74 64 58 95 94 49 -249 320 3 75 63 57 100 92 51 -275 320 3 72 60 51 105 91 46 -276 320 3 74 66 60 96 113 61 -277 320 3 76 65 60 96 114 61 -278 320 3 74 59 57 76 98 54 -279 320 3 72 59 55 70 80 47 -280 320 3 74 64 55 79 92 56 -281 320 3 76 63 55 95 95 52 -282 320 3 74 64 55 103 108 60 -283 320 3 75 63 54 104 101 52 -284 320 3 74 67 51 112 107 56 -285 320 3 74 63 53 112 103 51 -286 320 3 71 63 48 116 99 49 -287 320 3 70 57 36 118 85 36 -288 320 3 107 98 114 94 145 117 -289 320 3 137 127 149 93 185 154 -348 320 7 116 111 127 70 125 110 -349 320 7 125 118 135 70 120 113 -350 320 7 137 132 154 74 137 131 -240 321 3 90 88 96 79 112 83 -241 321 3 86 77 83 87 104 69 -242 321 3 82 71 73 95 105 59 -243 321 3 75 68 65 104 101 52 -244 321 3 76 65 59 111 87 41 -245 321 3 73 62 55 114 104 59 -246 321 3 74 67 59 103 89 42 -247 321 3 78 69 65 89 96 50 -248 321 3 77 70 68 91 107 68 -249 321 3 77 69 62 99 95 58 -275 321 3 75 65 60 91 116 63 -276 321 3 75 65 60 91 116 63 -277 321 3 77 66 65 83 120 66 -278 321 3 77 62 61 81 103 59 -279 321 3 75 67 63 92 106 60 -280 321 3 77 69 68 91 116 66 -281 321 3 77 63 58 92 96 54 -282 321 3 78 69 60 105 109 56 -283 321 3 75 64 53 105 110 56 -284 321 3 76 67 52 113 110 57 -285 321 3 75 66 56 112 110 55 -286 321 3 77 68 58 114 107 57 -287 321 3 75 68 53 118 103 50 -288 321 3 81 72 69 105 111 65 -289 321 3 124 116 131 97 177 144 -348 321 7 130 119 138 67 127 122 -349 321 7 132 116 128 65 119 117 -350 321 7 130 120 137 63 122 119 -240 322 3 82 77 78 88 117 91 -241 322 3 86 75 83 89 120 92 -242 322 3 89 78 89 88 124 94 -243 322 3 82 78 80 88 113 80 -244 322 3 73 61 54 85 86 49 -245 322 3 79 69 61 107 89 46 -246 322 3 81 76 82 92 113 76 -247 322 3 90 87 102 80 129 96 -248 322 3 89 
83 94 78 123 95 -249 322 3 81 74 80 89 109 77 -275 322 3 72 63 55 89 103 56 -276 322 3 65 53 44 94 91 45 -277 322 3 86 74 74 97 113 68 -278 322 3 109 105 115 105 131 95 -279 322 3 97 93 102 97 128 85 -280 322 3 77 66 64 82 108 62 -281 322 3 90 79 81 89 121 89 -282 322 3 90 84 90 95 135 94 -283 322 3 83 73 69 101 114 62 -284 322 3 83 75 73 99 117 74 -285 322 3 87 79 76 100 127 85 -286 322 3 85 77 77 99 120 79 -287 322 3 80 74 66 100 119 78 -288 322 3 80 74 66 100 119 78 -289 322 3 82 76 71 101 116 68 -348 322 7 116 101 112 54 99 97 -349 322 7 124 110 123 59 113 109 -350 322 7 109 93 95 54 85 87 -240 323 3 85 75 83 85 107 70 -241 323 3 89 81 93 80 120 93 -242 323 3 99 97 118 77 136 121 -243 323 3 96 89 103 75 110 93 -244 323 3 75 59 57 68 72 45 -245 323 3 74 60 58 87 87 52 -246 323 3 87 84 98 83 128 98 -247 323 3 97 97 118 74 144 125 -248 323 3 90 86 95 78 122 99 -249 323 3 79 69 72 97 101 65 -250 323 3 81 74 81 94 123 88 -276 323 3 76 65 60 94 78 30 -277 323 3 95 89 92 93 126 91 -278 323 3 154 156 182 109 215 199 -279 323 3 132 129 145 114 205 174 -280 323 3 79 69 73 83 120 80 -281 323 3 95 88 99 82 152 109 -282 323 3 85 73 81 88 139 88 -283 323 3 89 84 91 88 137 96 -284 323 3 90 79 87 87 146 97 -285 323 3 88 80 84 85 144 94 -286 323 3 87 76 82 82 135 87 -287 323 3 86 79 87 84 131 88 -288 323 3 89 81 87 92 127 87 -289 323 3 79 72 67 103 108 61 -240 324 3 91 88 108 73 131 109 -241 324 3 101 102 130 73 140 122 -242 324 3 102 95 113 68 115 100 -243 324 3 76 61 60 66 73 46 -244 324 3 72 59 55 72 85 53 -245 324 3 88 86 104 77 127 97 -246 324 3 99 98 121 74 136 118 -247 324 3 90 83 89 84 113 88 -248 324 3 90 83 89 84 113 88 -249 324 3 73 62 55 109 91 54 -250 324 3 69 56 51 119 86 43 -276 324 3 76 65 60 94 78 30 -277 324 3 90 84 89 85 113 80 -278 324 3 130 130 147 101 209 196 -279 324 3 120 119 129 113 209 189 -280 324 3 79 70 74 77 136 86 -281 324 3 89 78 86 78 178 115 -282 324 3 86 73 77 77 151 92 -283 324 3 87 78 80 87 155 102 -284 324 3 83 72 76 86 134 86 -285 324 3 85 
72 75 79 130 82 -286 324 3 82 70 75 84 128 80 -287 324 3 85 74 78 86 144 93 -288 324 3 83 71 71 96 132 86 -289 324 3 88 78 78 99 126 84 -240 325 3 90 86 116 75 134 108 -241 325 3 95 98 127 73 135 116 -242 325 3 98 92 104 70 114 95 -243 325 3 73 59 54 73 77 47 -244 325 3 77 68 71 76 98 66 -245 325 3 92 90 110 71 133 105 -246 325 3 96 94 116 76 130 105 -247 325 3 87 78 80 84 104 76 -248 325 3 81 71 72 86 103 70 -249 325 3 77 64 62 106 97 60 -250 325 3 72 59 44 122 86 42 -276 325 3 104 97 101 88 115 85 -277 325 3 109 103 109 91 120 83 -278 325 3 106 102 117 101 154 122 -279 325 3 94 88 87 97 156 121 -280 325 3 83 68 66 69 128 82 -281 325 3 79 65 61 55 95 59 -282 325 3 76 64 58 60 74 47 -283 325 3 80 69 70 84 133 83 -284 325 3 78 65 63 81 124 74 -285 325 3 81 66 64 80 125 75 -286 325 3 81 70 69 88 122 74 -287 325 3 82 72 73 89 120 71 -288 325 3 80 68 67 100 114 67 -289 325 3 78 62 57 107 108 65 -240 326 3 92 87 113 73 129 104 -241 326 3 97 96 124 71 131 116 -242 326 3 92 83 94 67 103 83 -243 326 3 72 56 50 69 69 40 -244 326 3 74 63 63 78 92 62 -245 326 3 94 92 109 74 135 109 -246 326 3 83 77 81 91 113 81 -247 326 3 73 61 52 102 89 51 -248 326 3 83 72 78 83 105 74 -249 326 3 92 85 99 73 119 93 -250 326 3 89 83 91 82 121 89 -276 326 3 133 129 145 87 160 140 -277 326 3 131 125 144 85 165 147 -278 326 3 104 93 102 91 143 118 -279 326 3 87 74 71 78 106 72 -280 326 3 74 58 50 35 50 28 -281 326 3 71 54 47 20 25 20 -282 326 3 75 65 59 61 57 35 -283 326 3 76 63 58 84 98 56 -284 326 3 76 61 59 91 106 60 -285 326 3 78 67 62 93 113 66 -286 326 3 81 72 74 88 130 80 -287 326 3 83 73 77 84 137 83 -288 326 3 82 72 75 87 125 72 -289 326 3 80 66 63 102 118 66 -389 326 1 116 107 128 68 151 128 -390 326 1 104 91 101 62 111 85 -391 326 1 104 91 101 62 111 85 -392 326 1 119 104 112 59 107 92 -393 326 1 113 95 110 52 87 80 -394 326 1 95 76 82 47 65 53 -395 326 1 86 70 70 67 83 53 -396 326 1 84 70 75 73 100 68 -397 326 1 81 69 68 63 82 53 -398 326 1 72 58 51 73 70 42 -399 326 1 69 56 46 84 77 
35 -240 327 3 95 87 107 71 120 102 -241 327 3 102 97 119 72 127 109 -242 327 3 86 75 84 61 98 79 -243 327 3 68 54 47 59 68 39 -244 327 3 68 56 48 83 77 40 -245 327 3 84 78 87 85 112 83 -246 327 3 78 70 65 96 101 61 -247 327 3 69 54 38 115 85 39 -248 327 3 69 55 40 120 84 39 -249 327 3 77 68 63 105 98 58 -250 327 3 82 74 76 92 106 73 -277 327 3 122 115 122 97 156 133 -278 327 3 99 92 95 96 142 113 -279 327 3 81 71 65 69 93 65 -280 327 3 70 53 48 28 15 11 -281 327 3 71 55 49 34 12 9 -282 327 3 75 65 59 61 57 35 -283 327 3 78 66 61 91 107 58 -284 327 3 74 62 55 98 103 53 -285 327 3 76 66 59 95 111 58 -286 327 3 81 68 66 94 120 65 -287 327 3 80 69 65 87 115 71 -288 327 3 82 73 77 83 123 83 -289 327 3 80 70 65 94 114 64 -380 327 1 157 147 171 93 161 142 -381 327 1 119 100 109 57 94 95 -382 327 1 97 74 77 42 82 78 -383 327 1 111 96 103 57 88 84 -384 327 1 108 89 100 54 86 78 -385 327 1 105 84 91 47 77 73 -386 327 1 116 95 104 58 81 79 -387 327 1 147 137 161 89 165 145 -388 327 1 140 137 163 86 188 160 -389 327 1 106 95 109 57 110 95 -390 327 1 78 58 63 46 80 57 -391 327 1 93 77 78 60 83 56 -392 327 1 97 84 95 66 96 75 -393 327 1 91 74 78 63 89 68 -394 327 1 88 74 76 65 80 58 -395 327 1 85 72 76 65 99 71 -396 327 1 81 66 65 75 88 57 -397 327 1 73 63 56 80 88 49 -398 327 1 69 54 44 85 72 37 -399 327 1 69 56 41 98 80 39 -240 328 3 92 83 100 70 114 92 -241 328 3 104 99 122 71 125 108 -242 328 3 86 75 85 55 88 71 -243 328 3 67 52 44 58 65 38 -244 328 3 73 62 53 88 79 39 -245 328 3 84 77 81 88 114 82 -246 328 3 90 86 99 78 128 100 -247 328 3 82 74 82 89 106 69 -248 328 3 73 61 51 114 86 41 -249 328 3 67 54 32 129 83 39 -250 328 3 67 53 37 126 85 39 -277 328 3 85 76 72 98 126 86 -278 328 3 81 70 65 91 113 67 -279 328 3 79 66 65 64 79 51 -280 328 3 77 64 56 52 57 38 -281 328 3 75 62 58 76 80 43 -282 328 3 79 68 63 91 118 69 -283 328 3 73 65 59 95 103 50 -284 328 3 82 69 63 94 97 50 -285 328 3 82 69 63 94 97 50 -286 328 3 87 78 82 93 113 70 -287 328 3 90 77 80 82 114 80 -288 328 
3 89 82 87 74 102 81 -289 328 3 82 72 71 100 118 75 -380 328 1 119 99 107 63 109 105 -381 328 1 104 81 85 46 76 72 -382 328 1 101 83 87 46 83 77 -383 328 1 98 81 85 49 87 77 -384 328 1 105 85 94 51 88 82 -385 328 1 107 87 96 49 90 86 -386 328 1 109 90 96 48 87 80 -387 328 1 110 90 101 55 94 89 -388 328 1 118 107 124 70 112 97 -389 328 1 135 131 152 79 111 88 -390 328 1 107 91 101 58 70 60 -391 328 1 100 86 94 59 73 62 -392 328 1 89 70 72 55 79 65 -393 328 1 88 74 73 71 91 64 -394 328 1 94 80 85 65 88 60 -395 328 1 90 76 83 67 99 70 -396 328 1 80 67 62 85 97 58 -397 328 1 84 77 73 86 111 74 -398 328 1 81 66 62 79 80 53 -399 328 1 74 58 49 94 82 45 -400 328 1 81 68 60 82 87 51 -240 329 3 84 75 75 75 100 76 -241 329 3 95 86 98 76 125 104 -242 329 3 79 67 69 64 73 63 -243 329 3 68 55 48 78 68 38 -244 329 3 74 62 50 101 84 46 -245 329 3 87 80 85 89 113 83 -246 329 3 91 88 108 75 129 105 -247 329 3 90 86 105 79 135 103 -248 329 3 83 75 76 94 122 81 -249 329 3 72 62 49 113 98 48 -250 329 3 71 59 44 118 92 47 -251 329 3 69 55 38 121 84 40 -380 329 1 92 74 76 62 82 74 -381 329 1 100 82 86 52 78 72 -382 329 1 101 83 90 52 84 78 -383 329 1 104 88 95 52 89 81 -384 329 1 103 86 94 55 92 86 -385 329 1 104 86 94 53 93 87 -386 329 1 107 91 99 53 92 88 -387 329 1 96 78 83 51 79 79 -388 329 1 119 107 123 70 103 82 -389 329 1 135 131 152 79 111 88 -390 329 1 139 131 158 79 128 90 -391 329 1 113 105 117 57 105 77 -392 329 1 87 64 64 44 58 54 -393 329 1 94 78 79 66 84 67 -394 329 1 98 81 89 56 86 67 -395 329 1 95 81 87 60 91 80 -396 329 1 85 75 74 78 113 69 -397 329 1 96 84 89 73 111 79 -398 329 1 93 79 81 64 98 70 -399 329 1 87 73 73 61 85 60 -400 329 1 87 71 68 68 80 60 -240 330 3 73 63 55 104 97 58 -241 330 3 71 58 46 111 85 46 -242 330 3 71 59 51 103 85 43 -243 330 3 72 59 49 107 86 43 -244 330 3 74 62 50 101 84 46 -245 330 3 75 64 55 106 98 59 -246 330 3 76 64 62 103 108 65 -247 330 3 80 70 70 99 113 70 -248 330 3 80 73 70 96 112 67 -249 330 3 78 66 56 103 97 52 -250 330 3 71 59 
45 113 84 43 -251 330 3 71 62 47 118 93 46 -380 330 1 87 73 74 72 84 64 -381 330 1 94 79 85 65 83 72 -382 330 1 98 81 87 64 88 76 -383 330 1 101 84 92 55 88 84 -384 330 1 99 84 91 59 95 81 -385 330 1 101 86 92 60 97 88 -386 330 1 99 83 90 59 98 91 -387 330 1 96 81 85 59 90 80 -388 330 1 92 76 80 55 90 73 -389 330 1 144 139 165 87 112 85 -390 330 1 107 96 114 65 96 77 -391 330 1 101 88 100 62 89 71 -392 330 1 95 79 82 60 79 66 -393 330 1 99 80 83 55 77 65 -394 330 1 98 79 87 50 74 66 -395 330 1 92 75 80 57 94 71 -396 330 1 96 81 91 67 95 76 -397 330 1 96 82 92 60 82 71 -398 330 1 95 77 81 51 79 63 -399 330 1 96 80 87 45 70 56 -400 330 1 87 71 76 58 73 51 -240 331 3 71 57 42 123 85 39 -241 331 3 68 56 40 131 84 37 -242 331 3 73 61 48 102 88 44 -243 331 3 71 58 48 100 84 41 -244 331 3 68 56 35 124 82 39 -245 331 3 67 54 34 122 80 38 -246 331 3 75 65 62 99 106 61 -247 331 3 85 76 81 84 123 80 -248 331 3 80 68 63 97 104 62 -249 331 3 70 59 44 115 83 41 -250 331 3 70 58 42 122 78 37 -251 331 3 70 60 43 125 84 38 -380 331 1 97 81 84 64 75 53 -381 331 1 98 80 87 64 90 72 -382 331 1 95 77 79 66 91 70 -383 331 1 98 81 85 63 94 83 -384 331 1 97 81 87 61 90 77 -385 331 1 94 79 83 60 90 73 -386 331 1 93 75 79 61 92 75 -387 331 1 95 76 80 58 90 76 -388 331 1 93 78 82 56 88 74 -389 331 1 86 69 73 53 91 68 -390 331 1 90 72 72 58 83 67 -391 331 1 94 75 78 59 81 68 -392 331 1 100 80 83 59 83 69 -393 331 1 99 85 90 51 82 75 -394 331 1 98 80 86 64 86 70 -395 331 1 88 73 78 64 87 62 -396 331 1 92 78 81 61 103 72 -397 331 1 93 80 88 55 88 73 -398 331 1 93 72 80 46 71 58 -399 331 1 97 79 93 49 86 71 -400 331 1 92 76 82 52 72 58 -380 332 1 93 76 84 58 83 66 -381 332 1 87 71 73 55 71 49 -382 332 1 90 67 69 45 55 44 -383 332 1 93 75 79 51 76 63 -384 332 1 88 69 70 46 69 52 -385 332 1 88 67 68 44 67 52 -386 332 1 94 76 79 48 79 67 -387 332 1 94 76 79 48 79 67 -388 332 1 93 78 82 51 79 69 -389 332 1 95 80 85 58 79 69 -390 332 1 102 86 92 57 73 71 -391 332 1 100 84 93 61 81 76 -392 332 1 92 76 
79 69 88 71 -393 332 1 90 76 80 67 84 69 -394 332 1 93 78 83 61 95 67 -395 332 1 88 73 78 64 87 62 -396 332 1 94 71 76 44 70 53 -397 332 1 94 75 80 45 77 64 -398 332 1 97 76 86 45 67 56 -399 332 1 95 75 85 47 70 60 -400 332 1 93 74 75 46 68 57 -380 333 1 94 75 82 52 82 71 -381 333 1 94 71 75 45 61 47 -382 333 1 89 69 71 39 56 46 -383 333 1 91 71 74 42 61 52 -384 333 1 87 65 67 40 49 42 -385 333 1 86 63 64 37 43 33 -386 333 1 89 66 67 37 53 47 -387 333 1 89 69 70 35 57 53 -388 333 1 94 81 86 57 91 66 -389 333 1 118 105 115 72 110 98 -390 333 1 108 96 102 74 103 86 -391 333 1 88 75 78 65 87 66 -392 333 1 93 80 85 62 86 68 -393 333 1 92 78 82 58 89 66 -394 333 1 97 77 83 50 68 56 -395 333 1 97 75 77 39 57 52 -396 333 1 96 74 80 41 64 53 -397 333 1 98 75 81 42 66 54 -398 333 1 97 77 82 41 62 51 -399 333 1 93 73 75 38 59 49 -400 333 1 93 73 75 38 59 49 -396 334 1 101 82 86 47 59 46 -397 334 1 92 72 76 40 58 50 -398 334 1 95 72 76 40 62 52 -399 334 1 93 72 78 40 60 48 -400 334 1 92 74 78 41 61 52 -185 346 4 80 69 69 85 110 70 -186 346 4 71 59 52 77 79 42 -185 347 4 85 71 73 76 86 58 -186 347 4 74 61 55 79 74 42 -187 347 4 75 67 61 80 95 56 -188 347 4 78 68 66 66 109 72 -184 348 4 80 70 69 71 92 59 -185 348 4 91 79 81 78 93 67 -186 348 4 76 62 57 84 77 43 -187 348 4 76 66 60 86 94 55 -188 348 4 84 72 69 81 103 71 -189 348 4 81 68 63 85 92 64 -184 349 4 74 60 55 70 81 49 -185 349 4 87 76 72 80 93 61 -186 349 4 74 63 58 81 80 45 -187 349 4 77 64 62 81 95 55 -188 349 4 81 68 64 94 102 62 -189 349 4 73 60 52 97 82 48 -184 350 4 82 71 72 80 104 64 -185 350 4 71 59 55 72 82 47 -186 350 4 75 58 55 74 87 52 -187 350 4 81 69 68 85 112 64 -188 350 4 77 63 59 83 90 48 -189 350 4 73 56 50 75 77 46 -184 351 4 80 73 73 85 101 65 -185 351 4 78 65 64 78 77 45 -186 351 4 72 60 51 79 75 43 -187 351 4 81 70 68 83 113 67 -188 351 4 79 67 67 81 107 62 -189 351 4 71 56 49 76 74 39 -184 352 4 86 82 81 92 116 77 -185 352 4 79 63 60 75 78 50 -186 352 4 75 57 53 74 70 37 -187 352 4 79 68 68 79 96 
61 -188 352 4 83 71 74 81 112 72 -189 352 4 75 61 61 79 101 55 -183 353 4 84 73 70 80 103 66 -184 353 4 87 79 83 81 116 80 -185 353 4 85 71 73 78 94 64 -186 353 4 85 68 70 77 84 54 -187 353 4 89 79 79 77 88 63 -188 353 4 85 76 78 79 103 71 -189 353 4 80 71 71 84 111 68 -183 354 4 74 64 58 82 95 54 -184 354 4 82 68 69 85 100 65 -185 354 4 80 65 66 70 97 64 -186 354 4 83 70 70 68 97 68 -187 354 4 88 75 74 70 84 59 -188 354 4 82 69 63 80 81 48 -189 354 4 81 70 71 85 105 66 -183 355 4 75 63 58 77 83 48 -184 355 4 84 71 71 89 98 60 -185 355 4 73 59 57 76 85 49 -186 355 4 74 58 55 66 75 44 -187 355 4 75 58 53 70 64 38 -188 355 4 69 54 44 85 68 34 -371 358 7 101 95 111 74 118 91 -372 358 7 118 105 119 69 137 124 -373 358 7 110 105 118 70 134 120 -371 359 7 102 97 111 75 118 92 -372 359 7 106 97 108 70 120 100 -373 359 7 110 105 118 70 134 120 -374 359 7 112 107 122 73 136 120 -371 360 7 108 100 114 62 119 107 -372 360 7 104 88 94 54 95 81 -373 360 7 109 101 111 69 108 92 -372 361 7 102 85 91 53 94 86 -373 361 7 110 95 104 63 97 86 -372 362 7 105 93 103 56 102 94 -373 362 7 111 93 95 51 87 78 -295 370 5 69 56 56 58 95 56 -296 370 5 72 55 57 59 105 66 -297 370 5 74 58 61 61 113 69 -298 370 5 75 57 63 62 122 76 -299 370 5 73 57 60 59 114 67 -300 370 5 75 56 56 54 94 58 -301 370 5 72 52 54 52 85 55 -302 370 5 72 52 50 51 84 51 -303 370 5 71 53 53 56 89 55 -304 370 5 71 55 55 57 94 57 -305 370 5 70 52 52 54 84 50 -306 370 5 70 53 48 61 72 43 -295 371 5 73 56 56 58 85 51 -296 371 5 68 51 50 57 80 48 -297 371 5 75 58 61 61 110 69 -298 371 5 75 59 61 59 114 72 -299 371 5 74 59 61 58 112 66 -300 371 5 72 55 54 55 102 62 -301 371 5 71 53 51 53 86 54 -302 371 5 71 52 50 52 74 46 -303 371 5 70 52 46 51 78 46 -304 371 5 65 49 44 56 71 41 -305 371 5 66 49 42 61 59 33 -306 371 5 70 52 48 62 65 38 -295 372 5 66 52 46 56 69 41 -296 372 5 67 49 44 60 62 33 -297 372 5 70 54 49 60 79 45 -298 372 5 70 52 52 58 85 51 -299 372 5 73 57 59 56 101 62 -300 372 5 73 56 58 54 98 59 -301 372 5 68 52 
50 57 75 43 -302 372 5 69 52 49 59 69 41 -303 372 5 69 50 44 54 65 37 -304 372 5 67 49 42 61 57 29 -305 372 5 68 50 44 63 62 35 -306 372 5 71 54 52 61 84 50 -399 372 5 65 49 42 60 58 30 -400 372 5 69 51 42 63 57 32 -401 372 5 68 50 40 66 59 34 -402 372 5 70 51 42 69 68 34 -403 372 5 74 54 48 66 74 44 -404 372 5 78 59 63 65 105 62 -405 372 5 81 66 71 65 111 71 -406 372 5 71 52 48 53 70 45 -407 372 5 67 51 43 66 75 43 -408 372 5 69 56 48 67 75 41 -409 372 5 71 56 47 60 61 37 -295 373 5 68 50 41 62 60 29 -296 373 5 68 50 41 62 60 29 -297 373 5 71 52 46 62 63 36 -298 373 5 69 48 41 59 58 30 -299 373 5 67 54 53 60 83 51 -300 373 5 70 53 54 56 86 52 -301 373 5 72 52 49 58 75 43 -302 373 5 72 52 50 59 79 49 -303 373 5 69 52 47 57 63 37 -304 373 5 68 52 46 60 72 39 -305 373 5 72 55 56 59 96 59 -306 373 5 73 58 60 57 100 64 -399 373 5 68 49 42 62 56 30 -400 373 5 70 51 46 66 57 30 -401 373 5 69 51 42 63 54 31 -402 373 5 67 49 39 65 59 29 -403 373 5 73 55 51 65 68 39 -404 373 5 78 60 65 68 106 63 -405 373 5 78 66 71 65 125 78 -406 373 5 66 49 45 50 62 38 -407 373 5 67 50 46 62 62 37 -408 373 5 70 55 46 73 72 40 -409 373 5 71 57 48 78 73 39 -295 374 5 67 51 45 62 64 31 -296 374 5 67 51 45 62 66 36 -297 374 5 68 49 45 62 59 30 -298 374 5 69 49 46 60 63 36 -299 374 5 71 54 49 57 74 45 -300 374 5 72 57 57 61 87 52 -301 374 5 72 55 56 60 93 56 -302 374 5 71 50 46 58 73 43 -303 374 5 67 52 44 60 66 36 -304 374 5 68 53 51 57 81 49 -305 374 5 69 51 49 57 72 45 -306 374 5 73 53 50 59 81 48 -399 374 5 68 50 41 70 59 29 -400 374 5 72 53 46 66 63 38 -401 374 5 69 50 41 57 58 33 -402 374 5 70 51 44 73 69 35 -403 374 5 75 59 57 71 86 53 -404 374 5 78 61 64 63 97 58 -405 374 5 74 57 61 59 113 69 -406 374 5 68 50 43 53 62 35 -407 374 5 67 50 46 64 55 32 -408 374 5 73 61 51 82 87 49 -295 375 5 68 50 45 60 65 36 -296 375 5 67 50 44 61 66 38 -297 375 5 70 49 45 61 69 38 -298 375 5 68 50 44 60 60 34 -299 375 5 71 51 43 58 66 38 -300 375 5 72 57 55 62 92 56 -301 375 5 70 57 59 60 100 60 -302 375 
5 68 50 48 55 77 44 -303 375 5 69 50 43 60 60 33 -304 375 5 67 52 45 59 59 33 -305 375 5 68 50 45 60 54 31 -306 375 5 71 50 51 60 75 44 -399 375 5 69 50 42 67 61 33 -400 375 5 72 54 49 60 58 35 -401 375 5 70 51 43 51 48 31 -402 375 5 72 51 45 66 69 38 -403 375 5 74 58 55 62 84 51 -404 375 5 72 56 53 51 76 50 -405 375 5 69 49 43 56 74 41 -406 375 5 70 50 44 57 59 36 -407 375 5 69 53 43 66 65 36 -408 375 5 72 59 51 72 80 43 -295 376 5 68 50 46 62 64 38 -296 376 5 67 50 44 61 66 38 -297 376 5 68 51 46 61 69 39 -298 376 5 67 50 46 58 68 39 -299 376 5 69 49 43 59 60 31 -300 376 5 69 53 50 63 78 47 -301 376 5 73 54 57 58 90 58 -302 376 5 69 50 48 54 70 39 -303 376 5 69 51 45 57 65 39 -304 376 5 69 51 44 57 65 39 -305 376 5 67 48 41 59 56 33 -306 376 5 70 51 49 61 71 44 -399 376 5 68 51 38 64 57 32 -400 376 5 68 52 43 61 60 32 -401 376 5 67 52 46 61 58 35 -402 376 5 69 54 46 65 64 37 -403 376 5 71 53 50 62 78 45 -404 376 5 70 54 48 49 56 38 -405 376 5 68 48 40 59 55 31 -406 376 5 70 51 45 57 54 29 -407 376 5 68 54 44 67 71 38 -408 376 5 71 59 52 73 86 46 -295 377 5 68 50 43 62 64 34 -296 377 5 70 50 44 61 65 37 -297 377 5 67 51 42 62 65 37 -298 377 5 64 51 43 60 64 35 -299 377 5 68 51 45 64 65 37 -300 377 5 69 54 54 66 83 49 -301 377 5 74 54 56 58 90 54 -302 377 5 71 51 45 56 66 39 -303 377 5 66 50 42 60 59 33 -304 377 5 70 50 44 59 63 37 -305 377 5 67 48 41 61 56 32 -306 377 5 71 51 47 64 67 40 -400 377 5 69 52 45 66 60 32 -401 377 5 69 54 48 65 72 41 -402 377 5 69 53 49 65 74 43 -403 377 5 71 53 50 62 78 45 -404 377 5 69 50 48 57 60 34 -405 377 5 69 53 44 62 62 35 -406 377 5 72 54 46 61 63 36 -407 377 5 68 58 50 69 72 40 -408 377 5 74 60 52 73 85 49 -295 378 5 67 50 42 62 61 30 -296 378 5 69 50 44 61 63 34 -297 378 5 67 50 40 62 61 35 -298 378 5 66 51 43 64 62 32 -299 378 5 69 53 51 64 78 47 -300 378 5 73 56 61 64 108 67 -301 378 5 73 55 58 58 102 62 -302 378 5 69 52 46 56 76 44 -303 378 5 65 49 40 62 55 29 -304 378 5 70 50 42 60 61 35 -305 378 5 69 49 43 62 60 35 -306 
378 5 69 50 45 62 67 37 -400 378 5 74 59 53 69 71 44 -401 378 5 72 55 47 61 71 45 -402 378 5 69 51 45 56 61 36 -403 378 5 72 54 46 62 65 38 -404 378 5 76 58 53 64 67 41 -405 378 5 79 68 62 64 78 52 -406 378 5 72 59 54 69 78 49 -407 378 5 71 58 49 72 74 41 -408 378 5 76 61 57 63 80 47 -295 379 5 68 49 43 64 62 31 -296 379 5 69 50 42 64 64 37 -297 379 5 67 49 46 60 65 36 -298 379 5 69 52 48 62 81 46 -299 379 5 71 55 57 62 91 55 -300 379 5 75 55 62 58 99 61 -301 379 5 72 55 54 55 89 52 -302 379 5 69 52 46 56 76 44 -303 379 5 67 49 42 60 61 32 -304 379 5 67 47 40 62 54 32 -305 379 5 68 51 44 64 58 34 -306 379 5 70 51 47 66 71 38 -400 379 5 68 56 43 66 70 40 -401 379 5 68 52 44 71 68 39 -402 379 5 70 52 43 65 62 34 -403 379 5 68 51 46 58 57 34 -404 379 5 72 54 44 61 59 35 -405 379 5 72 57 50 63 76 48 -406 379 5 75 58 54 67 83 53 -407 379 5 76 61 55 74 83 46 -408 379 5 76 59 57 68 82 49 -295 380 5 68 52 47 58 76 42 -296 380 5 69 50 51 59 79 43 -297 380 5 68 53 52 58 85 51 -298 380 5 71 56 56 56 95 58 -299 380 5 73 53 55 56 91 54 -300 380 5 70 52 51 56 77 44 -301 380 5 68 51 46 56 63 36 -302 380 5 69 50 43 59 64 36 -303 380 5 66 49 41 63 55 28 -304 380 5 66 49 41 63 55 28 -305 380 5 67 51 44 67 64 38 -306 380 5 70 55 51 65 90 50 -400 380 5 69 51 43 61 65 34 -401 380 5 66 49 39 65 63 32 -402 380 5 66 51 41 73 73 38 -403 380 5 69 53 45 64 60 37 -404 380 5 70 52 50 58 64 36 -405 380 5 74 59 51 61 70 39 -406 380 5 77 64 63 74 89 55 -407 380 5 79 69 64 90 106 60 -408 380 5 81 68 65 84 105 68 -295 381 5 71 51 46 57 65 38 -296 381 5 70 52 49 57 78 45 -297 381 5 67 52 49 56 82 50 -298 381 5 68 53 51 56 80 47 -299 381 5 70 52 49 57 75 42 -300 381 5 68 49 45 57 68 40 -301 381 5 67 48 42 57 60 34 -302 381 5 66 49 42 60 55 30 -303 381 5 66 51 41 65 57 29 -304 381 5 69 53 47 67 75 44 -305 381 5 73 57 56 61 96 59 -306 381 5 74 55 61 55 94 57 -400 381 5 70 50 42 58 66 38 -401 381 5 67 49 41 59 58 31 -402 381 5 67 51 44 68 72 37 -403 381 5 71 56 49 76 82 47 -404 381 5 75 59 54 69 89 52 
-405 381 5 73 56 52 59 78 46 -406 381 5 70 56 52 63 75 46 -407 381 5 73 57 53 77 87 47 -408 381 5 80 64 60 80 85 55 -116 382 5 69 51 48 60 72 40 -117 382 5 69 51 42 58 59 35 -118 382 5 69 52 42 63 58 31 -119 382 5 67 50 45 65 58 29 -120 382 5 67 49 44 59 59 31 -121 382 5 67 51 45 60 59 32 -122 382 5 70 52 45 58 63 38 -123 382 5 69 50 43 57 59 31 -124 382 5 70 51 46 62 69 39 -125 382 5 67 51 41 59 75 45 -126 382 5 70 54 47 54 64 39 -127 382 5 70 54 48 50 70 43 -400 382 5 69 52 45 56 72 40 -401 382 5 69 52 45 59 78 42 -402 382 5 72 54 48 66 80 43 -403 382 5 71 55 47 71 90 48 -404 382 5 73 58 52 67 92 53 -405 382 5 72 56 53 57 84 51 -406 382 5 70 54 46 50 56 34 -407 382 5 70 51 45 64 55 32 -408 382 5 70 53 46 65 50 29 -116 383 5 68 50 45 57 65 36 -117 383 5 70 52 45 57 62 37 -118 383 5 68 53 42 60 58 34 -119 383 5 67 52 44 64 62 33 -120 383 5 69 51 44 62 53 28 -121 383 5 68 49 43 62 56 29 -122 383 5 69 54 45 63 65 36 -123 383 5 69 52 44 61 59 33 -124 383 5 70 51 44 64 63 35 -125 383 5 69 50 45 57 72 41 -126 383 5 70 54 48 53 73 43 -127 383 5 72 58 55 59 71 42 -400 383 5 70 53 45 60 70 37 -401 383 5 69 52 48 57 78 44 -402 383 5 70 56 48 67 82 48 -403 383 5 71 55 49 69 84 48 -404 383 5 72 55 51 64 81 48 -405 383 5 74 56 50 58 79 47 -406 383 5 69 55 46 58 66 38 -407 383 5 65 52 43 60 67 41 -408 383 5 72 52 46 61 58 34 -116 384 5 67 52 44 62 58 31 -117 384 5 69 52 41 58 63 35 -118 384 5 70 52 43 61 60 36 -119 384 5 67 52 44 64 62 33 -120 384 5 67 50 45 62 60 34 -121 384 5 68 51 43 63 56 31 -122 384 5 70 52 43 60 65 37 -123 384 5 70 51 47 55 67 39 -124 384 5 68 50 45 57 60 33 -125 384 5 70 53 44 61 69 38 -126 384 5 72 56 47 60 78 46 -127 384 5 73 59 52 70 95 54 -400 384 5 71 54 44 60 73 40 -401 384 5 69 56 48 66 79 46 -402 384 5 70 55 50 60 75 42 -403 384 5 69 53 45 63 77 45 -404 384 5 70 53 43 60 68 41 -405 384 5 70 54 49 64 77 44 -406 384 5 71 58 52 62 73 48 -407 384 5 71 58 52 62 73 48 -408 384 5 76 60 56 63 70 42 -116 385 5 69 50 41 57 59 33 -117 385 5 69 50 43 58 60 
32 -118 385 5 71 52 43 62 60 31 -119 385 5 68 50 43 64 61 33 -120 385 5 68 51 45 60 63 35 -121 385 5 67 50 41 59 57 31 -122 385 5 67 51 43 63 56 31 -123 385 5 67 51 45 58 65 37 -124 385 5 67 49 43 55 55 30 -125 385 5 69 52 48 63 66 38 -126 385 5 71 56 47 69 83 44 -127 385 5 68 53 46 63 83 48 -228 385 4 82 64 66 80 71 44 -229 385 4 72 60 56 83 63 37 -116 386 5 68 49 45 56 57 33 -117 386 5 68 49 45 56 57 33 -118 386 5 68 50 43 61 56 30 -119 386 5 67 52 45 64 60 33 -120 386 5 67 50 44 61 59 30 -121 386 5 68 52 45 60 60 33 -122 386 5 69 52 46 62 63 34 -123 386 5 66 51 45 63 64 37 -124 386 5 67 49 44 56 67 36 -125 386 5 72 54 47 70 78 43 -126 386 5 71 57 48 69 88 46 -127 386 5 70 51 43 55 65 38 -228 386 4 82 67 74 76 96 64 -229 386 4 78 68 67 85 112 68 -230 386 4 80 66 66 87 94 57 -231 386 4 76 61 60 86 82 48 -116 387 5 70 50 44 56 63 37 -117 387 5 70 52 42 61 62 36 -118 387 5 72 52 47 59 68 39 -119 387 5 68 50 45 61 59 33 -120 387 5 67 50 42 60 57 30 -121 387 5 68 52 47 65 67 37 -122 387 5 68 54 46 62 77 45 -123 387 5 70 55 48 56 71 42 -124 387 5 73 54 51 64 86 47 -125 387 5 72 54 47 70 78 43 -126 387 5 69 52 46 67 78 45 -127 387 5 67 51 46 55 72 43 -228 387 4 71 53 54 64 89 55 -229 387 4 78 63 60 70 94 58 -230 387 4 84 71 72 78 98 65 -231 387 4 85 73 77 76 87 62 -116 388 5 71 51 44 58 66 38 -117 388 5 71 55 47 63 74 44 -118 388 5 71 55 51 56 79 47 -119 388 5 68 50 45 59 62 34 -120 388 5 70 53 48 66 71 40 -121 388 5 70 53 48 57 76 44 -122 388 5 69 52 48 58 72 43 -123 388 5 71 55 50 68 80 45 -124 388 5 69 49 44 53 66 37 -125 388 5 68 51 45 60 69 41 -126 388 5 69 55 47 59 73 44 -127 388 5 70 58 47 62 80 44 -228 388 4 68 51 47 65 66 39 -229 388 4 78 61 57 74 78 43 -230 388 4 87 75 76 74 96 65 -231 388 4 89 79 82 85 100 68 -232 388 4 75 60 56 79 79 45 -116 389 5 68 51 47 58 72 41 -117 389 5 70 54 50 62 80 46 -118 389 5 70 52 49 53 76 44 -119 389 5 71 53 50 57 77 45 -120 389 5 72 54 49 59 77 46 -121 389 5 67 51 46 54 68 40 -122 389 5 69 54 49 67 80 44 -123 389 5 69 51 45 56 
69 38 -124 389 5 67 46 41 50 52 30 -125 389 5 67 51 45 52 64 37 -126 389 5 69 55 45 60 70 41 -127 389 5 70 55 47 58 78 45 -228 389 4 73 57 57 70 81 47 -229 389 4 79 63 64 74 103 58 -230 389 4 81 71 71 77 102 63 -231 389 4 81 71 67 86 102 68 -232 389 4 84 72 71 85 86 53 -116 390 5 71 53 48 58 71 38 -117 390 5 70 53 48 56 77 46 -118 390 5 71 51 47 53 72 42 -119 390 5 69 54 50 54 78 47 -120 390 5 72 53 48 53 76 46 -121 390 5 69 55 50 62 78 46 -122 390 5 71 57 54 65 87 49 -123 390 5 69 49 44 44 67 37 -124 390 5 67 48 42 53 53 30 -125 390 5 67 49 44 56 57 33 -126 390 5 70 52 45 56 66 38 -127 390 5 70 53 45 56 73 43 -228 390 4 79 68 68 75 98 61 -229 390 4 83 72 72 76 106 68 -230 390 4 84 71 69 85 103 59 -231 390 4 81 71 67 91 104 66 -232 390 4 79 69 62 92 97 61 -116 391 5 72 54 47 57 78 46 -117 391 5 68 53 48 57 72 41 -118 391 5 71 50 46 57 67 39 -119 391 5 66 50 44 54 63 38 -120 391 5 69 51 44 60 64 39 -121 391 5 75 57 55 66 89 52 -122 391 5 71 53 52 51 81 48 -123 391 5 68 49 44 46 57 34 -124 391 5 69 51 45 55 58 35 -125 391 5 70 49 45 65 58 32 -126 391 5 69 50 43 55 61 34 -127 391 5 68 52 44 54 62 36 -228 391 4 78 66 64 82 98 64 -229 391 4 83 70 69 83 101 67 -230 391 4 83 71 67 89 102 62 -231 391 4 81 70 66 96 106 61 -232 391 4 80 69 64 91 102 64 -116 392 5 71 54 50 54 81 48 -117 392 5 70 54 48 59 68 39 -118 392 5 71 50 47 62 63 35 -119 392 5 67 50 40 58 54 29 -120 392 5 69 53 47 70 70 39 -121 392 5 72 54 53 55 80 48 -122 392 5 68 48 42 45 59 36 -123 392 5 69 48 41 54 55 32 -124 392 5 69 51 44 56 61 37 -125 392 5 70 51 43 60 58 34 -126 392 5 66 49 41 59 57 33 -127 392 5 66 49 43 56 54 30 -228 392 4 90 75 78 83 109 71 -229 392 4 79 68 62 93 96 61 -230 392 4 72 61 52 104 89 48 -231 392 4 75 59 52 92 82 42 -232 392 4 80 67 64 90 93 57 -117 393 5 71 53 55 64 81 47 -118 393 5 72 56 54 58 93 52 -119 393 5 68 50 42 65 61 33 -120 393 5 71 56 55 66 86 49 -121 393 5 68 50 48 45 67 43 -122 393 5 65 46 41 54 45 27 -123 393 5 68 49 43 62 59 30 -124 393 5 66 50 43 58 57 31 -125 393 
5 68 50 40 55 55 30 -126 393 5 67 49 42 62 56 28 -127 393 5 66 49 42 63 58 33 -228 393 4 88 74 74 88 99 65 -229 393 4 75 62 58 89 80 52 -230 393 4 71 57 46 101 76 40 -231 393 4 70 54 41 104 82 41 -232 393 4 75 60 51 87 77 46 -267 393 4 76 61 57 63 75 50 -268 393 4 77 60 57 67 98 61 -117 394 5 75 57 61 62 109 65 -118 394 5 67 53 49 49 74 44 -119 394 5 70 52 50 51 76 48 -120 394 5 68 45 40 49 49 29 -121 394 5 68 49 42 62 55 28 -122 394 5 69 52 47 65 64 34 -123 394 5 67 50 46 61 55 29 -124 394 5 70 50 42 60 53 30 -125 394 5 69 50 41 60 52 30 -126 394 5 69 49 41 65 55 29 -127 394 5 66 50 45 63 60 32 -267 394 4 73 63 60 79 89 53 -268 394 4 81 69 66 84 107 65 -269 394 4 75 64 57 86 101 60 -270 394 4 78 64 57 90 96 52 -271 394 4 85 75 72 99 100 60 -272 394 4 84 76 72 96 99 60 -117 395 5 67 53 49 49 74 44 -118 395 5 69 48 42 56 50 29 -119 395 5 67 48 42 57 48 26 -120 395 5 69 52 46 67 67 35 -121 395 5 70 52 49 62 70 43 -122 395 5 71 52 48 57 66 40 -123 395 5 71 53 47 59 62 34 -124 395 5 70 51 43 64 59 32 -125 395 5 69 53 47 63 64 35 -126 395 5 69 52 45 58 62 36 -127 395 5 67 50 43 59 57 33 -267 395 4 80 67 66 86 91 55 -268 395 4 87 77 77 85 109 72 -269 395 4 82 70 67 89 100 59 -270 395 4 75 62 55 103 82 43 -271 395 4 70 59 48 109 89 45 -272 395 4 80 71 72 89 102 64 -273 395 4 90 80 91 72 104 76 -267 396 4 89 80 80 91 116 77 -268 396 4 89 80 80 91 116 77 -269 396 4 92 83 84 90 119 80 -270 396 4 86 73 69 96 94 57 -271 396 4 76 61 54 92 75 36 -272 396 4 79 65 67 75 97 60 -273 396 4 88 76 82 68 114 83 -267 397 4 85 75 76 87 108 70 -268 397 4 89 79 79 83 94 66 -269 397 4 101 87 89 68 97 73 -270 397 4 84 70 65 84 88 58 -271 397 4 73 59 50 92 83 47 -272 397 4 87 75 75 78 106 74 -273 397 4 80 67 66 80 93 60 -267 398 4 83 71 72 84 106 65 -268 398 4 85 74 72 85 102 64 -269 398 4 84 70 65 84 86 62 -270 398 4 72 59 51 90 84 54 -271 398 4 73 61 54 94 76 40 -272 398 4 86 71 68 79 92 58 -273 398 4 83 69 65 67 94 60 -186 399 6 76 64 69 76 94 54 -187 399 6 81 69 72 69 104 66 -188 399 6 76 
62 63 68 86 51 -267 399 4 82 71 69 79 102 64 -268 399 4 85 74 72 85 102 64 -269 399 4 83 71 68 90 89 52 -270 399 4 76 61 54 94 79 42 -271 399 4 72 56 46 91 78 43 -272 399 4 75 60 52 84 78 47 -273 399 4 76 62 59 77 86 52 -185 400 6 74 59 62 60 76 46 -186 400 6 76 64 69 76 94 54 -187 400 6 72 57 56 49 75 41 -188 400 6 70 51 46 35 33 18 -267 400 4 81 70 68 81 89 56 -268 400 4 77 68 64 93 109 61 -269 400 4 85 71 70 94 95 61 -270 400 4 79 65 58 95 85 52 -271 400 4 86 75 73 90 93 61 -272 400 4 82 73 70 88 77 53 -273 400 4 73 58 53 86 71 38 -185 401 6 76 61 60 69 91 49 -186 401 6 68 48 40 25 24 16 -187 401 6 66 45 38 17 10 8 -188 401 6 66 46 33 13 10 10 -269 401 4 81 67 65 92 88 57 -270 401 4 86 72 70 85 89 67 -271 401 4 84 73 76 101 92 60 -272 401 4 84 69 70 84 81 55 -273 401 4 72 55 48 80 64 36 -274 401 4 79 64 58 85 80 50 -186 402 6 69 48 42 26 29 19 -271 402 4 75 61 55 100 87 46 -272 402 4 69 54 46 88 66 35 -273 402 4 70 54 41 86 64 34 -274 402 4 79 64 58 85 80 50 +id b1 b2 b3 b4 b5 b7 +133 59 5 94 76 80 58 89 70 +134 59 5 91 76 77 56 81 61 +135 59 5 84 70 76 62 102 65 +136 59 5 79 65 75 64 123 78 +137 59 5 77 64 70 62 128 81 +138 59 5 77 61 67 61 119 75 +139 59 5 79 61 74 64 137 86 +140 59 5 77 61 70 61 126 79 +141 59 5 78 62 68 61 121 77 +142 59 5 77 60 65 61 118 77 +143 59 5 75 60 64 60 114 72 +144 59 5 75 59 60 59 104 68 +145 59 5 81 63 72 72 110 68 +133 60 5 79 65 64 63 89 51 +134 60 5 78 66 66 61 100 56 +135 60 5 80 63 66 60 96 60 +136 60 5 76 59 62 60 90 56 +137 60 5 73 59 60 58 93 56 +138 60 5 73 56 57 58 86 52 +139 60 5 75 60 64 59 113 72 +140 60 5 78 60 63 58 113 72 +141 60 5 77 58 61 59 108 68 +142 60 5 73 57 57 56 100 63 +143 60 5 75 58 57 55 97 61 +144 60 5 73 59 57 58 98 61 +145 60 5 75 60 63 59 104 62 +133 61 5 73 59 60 65 94 58 +134 61 5 74 55 58 57 95 59 +135 61 5 72 55 54 53 84 52 +136 61 5 75 55 55 58 85 52 +137 61 5 75 55 55 58 85 52 +138 61 5 72 52 51 64 75 43 +139 61 5 72 56 58 57 85 53 +140 61 5 74 59 61 58 101 62 +141 61 5 76 57 61 59 100 59 
+142 61 5 73 55 58 54 85 52 +143 61 5 71 55 52 56 81 48 +144 61 5 75 57 55 56 90 55 +145 61 5 73 57 56 54 91 55 +133 62 5 75 58 54 59 89 52 +134 62 5 70 53 52 58 83 50 +135 62 5 69 54 51 57 77 46 +136 62 5 74 53 49 59 81 47 +137 62 5 71 52 44 69 67 39 +138 62 5 68 55 45 69 65 36 +139 62 5 74 56 57 56 88 52 +140 62 5 71 55 58 59 95 56 +141 62 5 72 54 54 55 85 57 +142 62 5 71 55 51 56 79 47 +143 62 5 72 54 51 55 75 48 +144 62 5 69 51 48 53 60 35 +145 62 5 69 51 46 54 63 36 +133 63 5 74 58 55 61 95 57 +134 63 5 70 56 52 68 78 47 +135 63 5 69 54 47 73 76 43 +136 63 5 72 55 46 71 73 40 +137 63 5 72 56 48 72 67 36 +138 63 5 68 55 47 72 71 38 +139 63 5 71 54 56 58 83 49 +140 63 5 71 54 56 58 90 54 +141 63 5 70 55 52 56 87 54 +142 63 5 71 52 48 53 71 42 +143 63 5 68 51 46 58 67 38 +144 63 5 70 56 50 64 73 40 +145 63 5 68 49 42 59 58 32 +134 64 5 74 62 64 73 109 66 +135 64 5 73 62 57 84 103 59 +136 64 5 73 60 52 84 102 54 +137 64 5 72 59 52 75 90 48 +138 64 5 68 55 47 72 71 38 +139 64 5 70 54 51 63 84 50 +140 64 5 73 56 57 58 93 59 +141 64 5 70 56 54 62 85 48 +142 64 5 72 54 52 65 72 41 +143 64 5 70 55 47 72 79 44 +144 64 5 71 61 57 75 93 53 +145 64 5 71 51 48 60 64 36 +134 65 5 80 66 74 74 111 69 +135 65 5 79 69 69 80 110 64 +136 65 5 74 63 61 80 112 62 +137 65 5 73 58 55 73 100 57 +138 65 5 70 57 52 71 79 43 +139 65 5 74 58 62 65 91 54 +140 65 5 75 58 61 62 102 62 +141 65 5 73 56 60 65 98 58 +142 65 5 73 57 55 67 93 55 +143 65 5 72 57 51 74 98 55 +144 65 5 71 57 55 68 90 53 +145 65 5 72 54 52 60 74 41 +138 80 7 87 75 80 71 107 73 +139 80 7 96 84 95 77 128 88 +140 80 7 94 86 99 78 138 89 +141 80 7 102 91 107 75 127 86 +138 81 7 98 88 96 79 139 98 +139 81 7 110 97 115 81 152 122 +140 81 7 125 115 135 76 139 125 +141 81 7 100 91 101 75 109 83 +138 82 7 94 80 89 74 136 99 +139 82 7 104 89 98 71 142 116 +140 82 7 135 129 148 78 179 176 +141 82 7 118 111 128 71 143 137 +138 83 7 86 74 74 81 119 66 +139 83 7 100 88 97 76 125 96 +140 83 7 131 125 147 78 183 178 +141 83 7 132 122 
141 74 172 167 +138 84 7 112 100 110 88 132 101 +139 84 7 117 110 122 82 127 105 +140 84 7 130 123 144 79 182 177 +141 84 7 124 112 133 68 173 174 +146 84 7 95 85 95 62 116 95 +147 84 7 116 108 129 69 137 133 +148 84 7 126 118 144 73 143 132 +149 84 7 109 96 116 64 128 116 +150 84 7 121 108 124 74 107 83 +145 85 7 93 76 85 62 105 77 +146 85 7 113 97 114 62 121 112 +147 85 7 122 111 134 72 139 133 +148 85 7 122 116 134 72 140 131 +149 85 7 115 102 121 64 125 112 +150 85 7 133 122 141 73 109 89 +145 86 7 116 104 118 68 146 131 +146 86 7 125 111 127 67 144 140 +147 86 7 122 112 131 69 138 132 +148 86 7 117 106 126 65 115 108 +149 86 7 119 109 121 65 124 111 +150 86 7 114 99 109 61 108 91 +403 114 1 97 83 90 58 85 74 +404 114 1 99 81 88 51 77 65 +402 115 1 98 88 93 57 100 90 +403 115 1 99 86 91 57 90 74 +404 115 1 93 76 75 57 84 69 +405 115 1 83 67 68 55 85 66 +402 116 1 129 117 129 69 103 91 +403 116 1 97 85 88 64 93 73 +404 116 1 106 90 98 64 99 76 +405 116 1 105 94 100 64 103 88 +406 116 1 112 97 109 65 101 92 +407 116 1 104 94 109 60 92 80 +408 116 1 112 102 119 62 101 88 +401 117 1 124 105 116 54 120 117 +402 117 1 157 139 156 80 165 156 +403 117 1 94 87 96 78 115 82 +404 117 1 103 87 97 63 101 83 +405 117 1 118 105 117 69 106 88 +406 117 1 108 92 101 63 100 80 +407 117 1 110 102 120 68 127 102 +408 117 1 108 102 120 63 108 90 +409 117 1 110 98 111 58 116 103 +410 117 1 106 93 98 51 69 61 +400 118 1 92 77 76 57 72 54 +401 118 1 99 81 82 52 86 81 +402 118 1 117 104 116 61 128 128 +403 118 1 102 94 110 73 123 100 +404 118 1 83 70 73 60 92 71 +405 118 1 103 89 106 65 109 88 +406 118 1 84 66 68 49 73 64 +407 118 1 99 87 96 56 89 80 +408 118 1 111 99 110 57 93 87 +409 118 1 111 96 107 53 86 77 +410 118 1 104 89 90 46 88 83 +411 118 1 108 99 114 57 102 91 +412 118 1 99 86 93 56 83 76 +399 119 1 72 60 50 62 68 45 +400 119 1 81 68 60 67 80 59 +401 119 1 102 94 105 71 114 92 +402 119 1 102 94 105 71 114 92 +403 119 1 97 88 101 65 115 94 +404 119 1 97 83 89 60 90 61 +405 
119 1 113 103 118 69 105 89 +406 119 1 94 78 85 55 87 69 +407 119 1 121 110 132 69 141 125 +408 119 1 149 136 154 77 150 149 +409 119 1 134 117 128 61 121 120 +410 119 1 96 76 77 38 58 55 +411 119 1 95 82 92 51 99 80 +412 119 1 109 96 113 70 99 86 +413 119 1 104 87 98 61 89 74 +399 120 1 83 68 71 66 63 39 +400 120 1 86 71 69 76 83 50 +401 120 1 96 87 95 75 109 76 +402 120 1 97 86 93 62 102 79 +403 120 1 98 85 95 66 98 79 +404 120 1 113 101 113 70 112 99 +405 120 1 98 85 96 59 93 81 +406 120 1 141 126 145 74 143 135 +407 120 1 159 147 168 81 154 147 +408 120 1 135 124 139 68 126 121 +409 120 1 107 87 95 55 90 82 +410 120 1 92 74 84 51 81 69 +411 120 1 110 99 117 66 107 95 +412 120 1 106 100 122 69 120 103 +413 120 1 100 87 97 55 95 81 +398 121 1 94 78 79 64 76 60 +399 121 1 95 83 84 65 80 64 +400 121 1 99 86 91 69 94 61 +401 121 1 103 92 102 68 106 81 +402 121 1 110 102 117 64 110 98 +403 121 1 114 105 117 73 124 100 +404 121 1 104 95 105 66 104 91 +405 121 1 102 87 92 55 95 87 +406 121 1 114 99 111 58 111 111 +407 121 1 128 112 128 61 107 104 +408 121 1 124 112 126 61 91 89 +409 121 1 104 90 94 59 88 81 +410 121 1 103 87 99 58 98 84 +411 121 1 110 97 112 62 105 94 +412 121 1 97 85 95 53 85 75 +397 122 1 102 90 93 75 95 68 +398 122 1 106 91 102 74 94 69 +399 122 1 111 98 111 68 97 80 +400 122 1 125 116 125 71 100 78 +401 122 1 107 95 104 64 94 76 +402 122 1 114 109 126 67 123 110 +403 122 1 121 115 133 75 130 108 +404 122 1 109 100 111 70 116 100 +405 122 1 98 86 92 59 86 75 +406 122 1 94 77 82 44 79 73 +407 122 1 96 79 84 43 88 80 +408 122 1 110 96 109 56 109 102 +409 122 1 109 96 109 58 99 92 +410 122 1 96 78 81 54 83 72 +411 122 1 104 88 97 56 91 78 +397 123 1 128 118 133 76 115 95 +398 123 1 117 102 118 70 103 82 +399 123 1 116 103 113 69 100 82 +400 123 1 128 118 131 71 97 86 +401 123 1 108 98 114 61 97 84 +402 123 1 118 112 131 67 110 96 +403 123 1 115 109 122 67 122 101 +404 123 1 104 99 113 71 107 83 +405 123 1 94 77 84 58 87 73 +406 123 1 99 87 96 55 81 65 
+407 123 1 125 115 134 71 111 93 +408 123 1 132 122 140 76 134 118 +409 123 1 109 99 114 60 110 101 +410 123 1 102 84 95 51 86 74 +411 123 1 107 90 102 55 100 88 +397 124 1 112 94 103 54 95 84 +398 124 1 107 91 102 62 101 88 +399 124 1 113 102 114 68 114 98 +400 124 1 120 116 129 70 124 109 +401 124 1 126 122 141 72 131 113 +402 124 1 128 121 142 72 125 105 +403 124 1 121 117 136 74 114 94 +404 124 1 99 87 95 67 101 78 +405 124 1 91 74 81 63 107 80 +406 124 1 100 87 99 59 101 79 +407 124 1 119 109 126 67 121 106 +408 124 1 122 116 133 72 134 120 +409 124 1 102 95 109 58 106 95 +410 124 1 107 94 105 54 102 94 +399 125 1 106 102 118 68 116 102 +400 125 1 116 120 147 80 137 115 +401 125 1 126 123 147 78 131 108 +402 125 1 121 109 131 72 132 105 +403 125 1 121 117 136 74 114 94 +404 125 1 91 82 89 66 95 73 +405 125 1 81 68 67 61 96 71 +406 125 1 89 73 81 55 95 74 +407 125 1 105 94 107 60 113 100 +408 125 1 121 114 134 70 128 117 +409 125 1 110 100 119 61 107 95 +401 126 1 91 78 87 68 94 70 +402 126 1 89 75 76 66 97 74 +403 126 1 88 78 82 57 93 71 +404 126 1 93 82 86 60 93 76 +405 126 1 101 88 100 61 100 85 +406 126 1 110 98 115 62 113 98 +407 126 1 112 103 122 63 120 105 +408 126 1 111 98 115 59 113 98 +402 127 1 91 75 79 66 107 71 +403 127 1 95 84 89 71 113 90 +404 127 1 96 86 89 81 123 96 +405 127 1 114 102 116 68 126 111 +406 127 1 117 102 120 58 110 108 +407 127 1 120 107 126 64 117 103 +408 127 1 142 129 151 76 129 112 +404 128 1 88 77 79 86 114 80 +405 128 1 107 94 104 71 109 94 +406 128 1 106 89 100 55 94 92 +407 128 1 115 102 117 69 134 119 +406 129 1 92 76 77 66 108 81 +352 134 3 79 66 71 65 122 77 +353 134 3 82 67 77 73 131 83 +354 134 3 79 65 73 80 127 77 +355 134 3 77 65 64 80 109 63 +352 135 3 78 65 72 66 124 77 +353 135 3 81 68 78 77 137 84 +354 135 3 78 66 70 82 119 71 +355 135 3 71 57 50 76 79 45 +356 135 3 68 52 46 69 62 36 +357 135 3 74 59 56 72 85 50 +358 135 3 78 68 69 79 120 69 +359 135 3 83 69 74 76 124 73 +360 135 3 81 70 74 82 123 75 +352 136 3 
81 67 78 78 138 84 +353 136 3 81 69 74 83 135 83 +354 136 3 75 63 56 82 99 58 +355 136 3 69 55 46 77 69 36 +356 136 3 68 52 43 73 56 30 +357 136 3 72 56 49 74 79 45 +358 136 3 78 68 69 79 120 69 +359 136 3 81 71 75 81 129 76 +360 136 3 85 75 78 82 123 73 +361 136 3 84 81 90 83 113 75 +362 136 3 84 83 98 88 126 89 +363 136 3 81 77 94 90 127 80 +352 137 3 81 68 72 87 121 75 +353 137 3 76 63 60 78 100 60 +354 137 3 71 56 46 76 75 40 +355 137 3 70 55 46 77 67 33 +356 137 3 67 54 45 75 62 34 +357 137 3 71 58 49 75 67 37 +358 137 3 78 68 69 80 124 73 +359 137 3 82 72 74 78 134 82 +360 137 3 88 81 90 77 137 93 +361 137 3 91 86 101 80 136 99 +362 137 3 83 76 85 83 122 85 +363 137 3 85 83 100 85 129 89 +352 138 3 72 58 52 79 81 47 +353 138 3 72 57 48 76 81 45 +354 138 3 72 55 47 80 74 38 +355 138 3 72 54 47 78 63 33 +356 138 3 69 53 44 77 56 28 +357 138 3 69 56 48 79 59 32 +358 138 3 76 66 59 80 99 55 +359 138 3 80 70 73 79 128 77 +360 138 3 84 76 86 80 126 87 +361 138 3 87 81 92 86 132 90 +362 138 3 83 75 82 80 134 84 +363 138 3 85 79 90 86 128 85 +352 139 3 73 57 50 78 73 41 +353 139 3 72 57 49 79 81 41 +354 139 3 71 57 50 79 75 37 +355 139 3 70 56 46 78 62 35 +356 139 3 69 58 47 80 61 35 +357 139 3 74 62 60 83 82 44 +358 139 3 76 64 59 83 87 50 +359 139 3 76 68 66 94 112 60 +360 139 3 77 69 60 96 106 64 +361 139 3 76 69 58 107 102 55 +362 139 3 78 70 68 98 120 70 +363 139 3 89 84 95 75 143 99 +352 140 3 69 55 51 77 79 42 +353 140 3 71 56 48 76 65 35 +354 140 3 73 55 48 79 62 34 +355 140 3 69 58 52 85 81 45 +356 140 3 78 67 69 88 123 72 +357 140 3 83 70 76 82 139 83 +358 140 3 76 68 66 90 111 61 +359 140 3 81 71 67 91 117 67 +360 140 3 80 69 69 100 111 59 +361 140 3 79 71 66 107 103 59 +362 140 3 85 74 75 85 128 79 +363 140 3 80 72 72 73 140 89 +352 141 3 73 58 55 81 92 50 +353 141 3 71 58 50 85 81 44 +354 141 3 73 60 50 91 77 40 +355 141 3 75 64 57 93 97 54 +356 141 3 81 71 73 85 137 80 +357 141 3 85 71 73 80 142 84 +358 141 3 78 68 65 90 113 66 +359 141 3 79 67 66 95 
114 62 +360 141 3 85 72 74 93 133 77 +361 141 3 77 70 63 109 106 58 +362 141 3 80 73 70 89 119 74 +363 141 3 85 74 75 85 128 79 +352 142 3 74 63 56 96 104 56 +353 142 3 71 60 48 110 97 49 +354 142 3 70 59 47 111 88 41 +355 142 3 72 62 53 99 97 50 +356 142 3 79 68 64 89 122 71 +357 142 3 82 71 77 81 139 83 +358 142 3 78 68 67 86 123 75 +359 142 3 79 67 63 95 110 57 +360 142 3 85 72 76 91 132 77 +361 142 3 79 69 59 108 104 55 +362 142 3 82 76 75 95 119 72 +363 142 3 89 80 100 67 117 83 +352 143 3 76 64 61 91 105 58 +353 143 3 77 62 55 112 102 53 +354 143 3 73 60 47 117 91 42 +355 143 3 70 60 47 110 88 40 +356 143 3 76 64 58 101 100 55 +357 143 3 78 66 61 86 115 66 +358 143 3 82 70 73 81 121 75 +359 143 3 76 64 63 86 100 56 +360 143 3 81 71 69 90 116 67 +361 143 3 77 67 60 107 95 47 +362 143 3 82 74 69 102 109 62 +363 143 3 106 105 128 84 158 106 +352 144 3 77 66 65 77 114 67 +353 144 3 78 66 64 79 109 61 +354 144 3 76 61 56 91 97 56 +355 144 3 74 62 51 109 86 49 +356 144 3 77 64 58 98 78 46 +357 144 3 79 65 59 76 81 51 +358 144 3 78 61 62 63 106 66 +359 144 3 78 62 58 65 91 58 +360 144 3 81 72 72 81 104 65 +361 144 3 83 71 65 98 100 57 +362 144 3 80 73 66 99 112 65 +363 144 3 93 89 100 85 149 95 +352 145 3 77 67 67 79 113 69 +353 145 3 74 65 63 83 104 61 +354 145 3 79 65 64 89 102 60 +355 145 3 77 64 57 92 89 55 +356 145 3 79 64 59 64 63 47 +357 145 3 89 73 71 50 63 55 +358 145 3 78 62 63 65 96 63 +359 145 3 79 67 70 71 109 71 +360 145 3 87 74 76 87 120 75 +361 145 3 81 72 73 99 117 71 +362 145 3 84 76 76 87 137 88 +363 145 3 86 78 82 83 128 78 +352 146 3 85 73 73 72 100 67 +353 146 3 81 68 68 87 109 63 +354 146 3 78 67 62 87 95 49 +355 146 3 81 63 57 50 60 37 +356 146 3 91 71 66 39 57 48 +357 146 3 89 73 75 54 78 62 +358 146 3 79 66 68 62 108 71 +359 146 3 81 72 77 86 131 82 +360 146 3 81 72 77 86 131 82 +361 146 3 79 71 70 97 131 75 +362 146 3 85 74 84 77 145 94 +363 146 3 86 79 86 78 137 84 +352 147 3 86 73 76 90 100 70 +353 147 3 77 65 60 83 96 60 +354 147 3 76 
65 57 67 85 51 +355 147 3 77 61 52 70 71 40 +356 147 3 80 68 60 66 74 48 +357 147 3 82 67 69 70 83 49 +358 147 3 79 66 63 68 88 61 +359 147 3 91 77 83 71 115 84 +360 147 3 87 77 79 85 122 85 +361 147 3 84 76 80 86 135 80 +362 147 3 84 74 83 74 139 88 +363 147 3 82 78 84 74 138 86 +352 148 3 88 75 76 92 99 57 +353 148 3 85 69 64 63 70 43 +354 148 3 81 65 61 58 68 47 +355 148 3 72 59 52 75 82 46 +356 148 3 74 62 54 65 81 50 +357 148 3 82 67 69 70 83 49 +358 148 3 86 70 67 66 85 62 +359 148 3 108 90 99 60 100 87 +360 148 3 110 93 103 66 101 88 +361 148 3 86 76 75 98 108 63 +362 148 3 84 73 73 92 130 79 +363 148 3 85 75 85 74 139 86 +352 149 3 100 84 84 68 97 79 +353 149 3 91 73 70 58 70 55 +354 149 3 80 63 60 69 82 55 +355 149 3 70 56 50 66 84 49 +356 149 3 73 59 52 61 81 48 +357 149 3 80 64 64 67 89 60 +358 149 3 82 66 66 55 86 63 +359 149 3 93 79 81 52 89 71 +360 149 3 107 87 93 64 93 73 +361 149 3 79 67 60 94 85 44 +362 149 3 76 67 61 101 110 61 +363 149 3 84 71 81 81 134 85 +264 169 5 71 50 43 49 65 40 +265 169 5 69 53 44 53 68 39 +266 169 5 69 50 46 57 72 39 +267 169 5 68 50 44 56 54 32 +268 169 5 70 52 48 51 57 35 +269 169 5 76 63 56 65 78 49 +270 169 5 75 62 61 67 98 59 +271 169 5 74 57 56 67 87 49 +272 169 5 72 55 50 60 66 38 +273 169 5 74 58 58 64 98 58 +274 169 5 74 59 59 63 100 63 +102 170 1 82 64 57 75 75 48 +103 170 1 87 72 74 64 90 68 +104 170 1 104 89 96 54 92 80 +264 170 5 70 50 41 48 56 35 +265 170 5 68 51 43 54 64 37 +266 170 5 68 56 49 56 70 40 +267 170 5 68 50 43 50 52 35 +268 170 5 70 53 46 57 64 38 +269 170 5 70 56 52 60 78 46 +270 170 5 71 52 48 58 81 48 +271 170 5 73 54 48 58 72 41 +272 170 5 70 55 57 61 89 54 +273 170 5 78 58 65 61 104 64 +274 170 5 74 60 64 60 100 63 +100 171 1 97 79 78 60 71 50 +101 171 1 95 76 77 59 76 54 +102 171 1 112 96 103 69 97 80 +103 171 1 99 82 87 58 94 75 +104 171 1 103 89 100 61 94 78 +264 171 5 68 50 44 53 62 34 +265 171 5 68 53 50 62 75 41 +266 171 5 71 56 53 63 76 45 +267 171 5 69 52 48 55 60 37 +268 171 5 68 
49 41 56 48 25 +269 171 5 68 52 45 62 58 31 +270 171 5 69 50 44 59 62 35 +271 171 5 72 53 49 61 72 39 +272 171 5 73 56 55 60 90 55 +273 171 5 76 58 61 59 99 62 +274 171 5 70 56 59 58 93 54 +294 171 6 74 59 61 63 110 70 +295 171 6 73 58 59 64 105 65 +296 171 6 75 61 60 66 116 71 +297 171 6 78 60 62 66 115 71 +298 171 6 75 59 56 64 103 63 +299 171 6 75 60 62 63 108 65 +300 171 6 75 59 63 66 101 61 +301 171 6 74 57 56 63 94 55 +98 172 1 99 80 86 57 93 68 +99 172 1 99 79 79 45 63 54 +100 172 1 100 75 78 37 57 46 +101 172 1 97 73 78 48 62 45 +102 172 1 111 100 108 68 101 88 +103 172 1 111 97 107 65 98 86 +104 172 1 100 87 96 63 94 81 +264 172 5 69 53 52 60 77 44 +265 172 5 74 59 63 69 94 56 +266 172 5 71 56 53 63 76 45 +267 172 5 73 58 54 59 76 49 +268 172 5 68 50 40 54 49 27 +269 172 5 67 48 42 61 59 33 +270 172 5 66 51 45 60 62 33 +271 172 5 70 54 50 64 70 37 +272 172 5 72 56 55 62 84 51 +273 172 5 74 58 64 60 98 60 +274 172 5 72 58 60 60 93 54 +293 172 6 75 62 62 64 115 72 +294 172 6 74 59 58 62 115 73 +295 172 6 75 58 59 61 107 65 +296 172 6 76 58 62 62 115 71 +297 172 6 73 59 63 60 111 72 +298 172 6 75 61 65 59 117 73 +299 172 6 77 59 62 64 113 70 +300 172 6 75 58 63 62 108 68 +301 172 6 81 64 71 58 133 92 +96 173 1 114 105 122 76 120 93 +97 173 1 134 129 148 80 132 107 +98 173 1 131 114 139 71 119 98 +99 173 1 116 103 116 61 94 80 +100 173 1 98 78 84 42 65 54 +101 173 1 95 71 72 39 59 51 +102 173 1 111 100 108 68 101 88 +103 173 1 109 97 108 65 99 91 +104 173 1 98 82 88 57 96 84 +264 173 5 73 57 54 65 86 53 +265 173 5 77 63 69 72 109 66 +266 173 5 73 58 58 66 88 55 +267 173 5 76 57 55 59 81 54 +268 173 5 68 53 44 52 62 36 +269 173 5 66 49 42 60 60 35 +270 173 5 68 51 45 59 62 32 +271 173 5 69 55 52 60 77 43 +272 173 5 73 57 58 63 95 56 +273 173 5 75 60 63 63 105 62 +274 173 5 74 61 65 63 112 69 +293 173 6 74 62 63 65 117 70 +294 173 6 75 60 65 64 122 79 +295 173 6 75 58 62 60 110 74 +296 173 6 76 59 63 60 115 71 +297 173 6 77 64 69 62 125 82 +298 173 6 75 63 69 60 
114 75 +299 173 6 79 65 73 63 126 84 +300 173 6 77 66 74 61 136 93 +301 173 6 77 65 79 57 141 102 +94 174 1 135 119 140 70 182 189 +95 174 1 113 95 110 62 116 108 +96 174 1 124 119 136 78 146 115 +97 174 1 149 154 185 103 187 154 +98 174 1 155 161 203 110 186 156 +99 174 1 149 152 189 104 178 150 +100 174 1 111 102 113 69 100 79 +101 174 1 95 76 79 50 71 54 +102 174 1 95 81 87 58 90 64 +103 174 1 107 94 107 66 105 89 +104 174 1 109 93 107 59 89 82 +105 174 1 104 90 99 56 90 79 +264 174 5 75 58 58 64 85 53 +265 174 5 76 61 63 66 95 59 +266 174 5 77 63 68 67 109 68 +267 174 5 75 57 60 57 89 57 +268 174 5 71 56 53 54 76 46 +269 174 5 69 52 48 61 75 46 +270 174 5 70 53 44 59 65 35 +271 174 5 69 54 52 59 68 39 +272 174 5 73 56 57 61 91 53 +273 174 5 74 60 64 63 102 60 +274 174 5 76 61 68 63 118 74 +293 174 6 75 61 61 66 113 68 +294 174 6 74 60 61 63 112 70 +295 174 6 75 57 64 61 115 78 +296 174 6 77 63 70 62 126 83 +297 174 6 78 64 70 60 130 87 +298 174 6 79 66 73 59 111 75 +299 174 6 78 65 73 59 111 77 +300 174 6 77 69 79 62 139 98 +301 174 6 81 70 81 60 152 107 +92 175 1 96 80 86 40 65 61 +93 175 1 101 82 88 57 102 92 +94 175 1 103 87 94 60 119 111 +95 175 1 109 95 101 60 102 91 +96 175 1 115 105 119 72 121 105 +97 175 1 139 136 164 94 166 133 +98 175 1 147 155 191 104 207 163 +99 175 1 143 146 179 100 196 163 +100 175 1 110 100 112 75 105 79 +101 175 1 97 84 93 62 90 70 +102 175 1 100 83 91 58 94 77 +103 175 1 104 88 98 58 102 90 +104 175 1 113 98 111 59 109 101 +105 175 1 115 101 113 58 110 102 +264 175 5 76 58 62 62 95 60 +265 175 5 74 59 64 66 114 69 +266 175 5 79 65 74 67 117 76 +267 175 5 79 64 69 58 103 68 +268 175 5 76 60 64 56 84 52 +269 175 5 70 53 51 58 87 51 +270 175 5 69 55 45 60 73 42 +271 175 5 68 55 46 62 69 41 +272 175 5 73 56 57 61 91 53 +273 175 5 74 57 64 58 101 62 +274 175 5 78 59 65 61 114 74 +293 175 6 76 60 66 63 117 74 +294 175 6 76 60 64 63 115 73 +295 175 6 78 64 72 65 134 87 +296 175 6 78 64 73 64 134 88 +297 175 6 77 62 67 61 121 78 +298 
175 6 78 61 67 55 114 77 +299 175 6 79 65 72 56 113 80 +300 175 6 80 69 81 60 138 101 +301 175 6 81 69 83 61 148 107 +91 176 1 94 73 80 46 67 60 +92 176 1 92 69 73 30 38 37 +93 176 1 91 74 73 39 51 45 +94 176 1 101 84 85 48 61 50 +95 176 1 103 88 96 58 98 82 +96 176 1 108 90 101 56 102 100 +97 176 1 112 98 108 62 114 100 +98 176 1 120 116 129 80 151 122 +99 176 1 128 120 141 82 154 117 +100 176 1 102 85 99 62 105 77 +101 176 1 100 90 95 61 92 75 +102 176 1 104 90 100 61 95 82 +103 176 1 89 72 73 42 59 54 +104 176 1 95 77 78 44 64 62 +105 176 1 115 102 109 59 108 104 +264 176 5 74 61 64 65 108 67 +265 176 5 73 58 61 64 88 53 +266 176 5 81 65 76 63 123 78 +267 176 5 81 64 71 57 90 64 +268 176 5 74 58 57 57 83 51 +269 176 5 70 56 50 60 71 42 +270 176 5 69 55 48 63 67 37 +271 176 5 71 54 47 59 71 45 +272 176 5 75 57 61 57 89 53 +273 176 5 77 58 63 62 107 65 +274 176 5 76 62 68 62 114 72 +293 176 6 76 59 63 64 124 80 +294 176 6 77 59 64 67 130 83 +295 176 6 80 66 73 69 143 93 +296 176 6 76 66 74 67 137 88 +297 176 6 78 63 69 65 123 78 +298 176 6 79 64 69 60 116 77 +299 176 6 78 65 73 59 122 85 +300 176 6 80 68 80 60 139 99 +301 176 6 84 71 84 61 143 103 +91 177 1 104 82 91 45 61 54 +92 177 1 93 68 67 33 37 37 +93 177 1 94 75 76 36 42 36 +94 177 1 102 83 84 45 69 60 +95 177 1 102 82 92 50 80 71 +96 177 1 107 89 99 59 98 86 +97 177 1 104 89 100 64 89 77 +98 177 1 110 97 106 71 110 85 +99 177 1 101 88 100 60 99 79 +100 177 1 96 82 90 52 72 63 +101 177 1 107 89 100 53 85 82 +102 177 1 105 87 95 51 82 73 +103 177 1 105 87 95 51 82 73 +264 177 5 74 58 59 58 97 58 +265 177 5 72 56 58 59 97 56 +266 177 5 79 63 71 61 113 75 +267 177 5 83 67 69 58 91 66 +268 177 5 81 62 67 59 94 63 +269 177 5 72 57 60 59 92 57 +270 177 5 72 56 57 59 81 47 +271 177 5 71 54 48 59 77 44 +272 177 5 72 57 59 61 86 50 +273 177 5 74 56 61 61 107 63 +274 177 5 75 59 65 59 106 70 +293 177 6 77 59 62 64 113 73 +294 177 6 76 64 68 67 123 78 +295 177 6 80 67 71 68 125 81 +296 177 6 79 66 77 67 133 89 +297 
177 6 78 63 69 65 123 78 +298 177 6 78 62 65 63 124 81 +299 177 6 76 60 64 60 119 80 +300 177 6 81 66 78 61 138 96 +301 177 6 84 74 88 62 148 105 +92 178 1 102 86 90 48 65 59 +93 178 1 99 76 80 39 54 49 +94 178 1 96 72 74 36 44 42 +95 178 1 92 73 74 43 49 44 +96 178 1 100 86 94 58 91 75 +97 178 1 105 85 94 58 89 76 +98 178 1 101 91 99 67 102 87 +99 178 1 99 82 81 55 73 61 +100 178 1 136 134 152 75 119 113 +101 178 1 138 124 142 67 106 99 +102 178 1 108 94 101 59 118 103 +264 178 5 74 58 62 57 99 62 +265 178 5 74 57 62 59 96 60 +266 178 5 76 61 63 58 82 53 +267 178 5 80 65 66 59 97 62 +268 178 5 77 58 64 61 90 59 +269 178 5 73 60 64 62 100 60 +270 178 5 72 58 57 55 81 49 +271 178 5 69 51 45 57 66 38 +272 178 5 69 55 49 62 78 49 +273 178 5 71 55 54 60 88 53 +274 178 5 76 59 62 63 93 55 +293 178 6 76 65 69 57 111 75 +294 178 6 80 66 73 59 122 81 +295 178 6 83 68 78 59 126 86 +296 178 6 82 69 79 59 128 88 +297 178 6 78 63 69 64 129 86 +298 178 6 78 61 65 61 122 83 +299 178 6 80 69 79 61 139 97 +300 178 6 80 69 79 61 139 97 +301 178 6 81 72 85 63 138 102 +92 179 1 111 91 102 56 91 79 +93 179 1 107 87 96 52 82 74 +94 179 1 102 83 90 45 65 59 +95 179 1 99 79 83 45 59 51 +96 179 1 99 85 85 62 75 66 +97 179 1 101 84 92 65 92 76 +98 179 1 105 93 97 58 85 72 +99 179 1 128 119 129 68 108 98 +100 179 1 163 168 194 89 145 144 +93 180 1 109 93 104 57 91 80 +94 180 1 108 90 100 58 94 81 +95 180 1 104 90 97 63 88 75 +96 180 1 101 84 86 63 80 71 +97 180 1 102 84 92 68 90 73 +98 180 1 106 95 101 55 84 76 +93 181 1 114 95 110 50 98 87 +94 181 1 110 91 99 60 98 84 +95 181 1 100 84 92 69 94 75 +96 181 1 104 89 97 66 89 79 +94 182 1 99 81 85 70 93 76 +192 188 6 68 48 36 15 15 13 +193 188 6 66 46 37 16 15 11 +194 188 6 66 46 35 14 13 13 +195 188 6 66 47 36 15 14 12 +196 188 6 66 46 36 14 13 11 +197 188 6 68 45 36 15 14 14 +198 188 6 64 45 35 14 12 13 +199 188 6 66 47 35 13 12 10 +200 188 6 65 47 35 13 13 11 +201 188 6 66 45 35 15 13 13 +202 188 6 64 44 34 15 12 12 +203 188 6 66 46 34 15 
14 11 +204 188 6 65 46 35 15 13 13 +192 189 6 66 46 36 15 14 12 +193 189 6 68 46 36 15 14 11 +194 189 6 67 45 36 15 15 13 +195 189 6 65 47 36 15 14 11 +196 189 6 67 44 37 15 15 12 +197 189 6 67 47 37 15 13 12 +198 189 6 67 45 37 15 13 13 +199 189 6 64 46 33 15 13 12 +200 189 6 65 46 35 15 14 11 +201 189 6 66 44 36 15 14 12 +202 189 6 66 44 34 15 13 9 +203 189 6 66 46 36 15 12 10 +204 189 6 66 46 36 15 13 13 +193 190 6 69 46 36 14 13 12 +194 190 6 67 46 34 16 14 12 +195 190 6 67 46 36 15 14 14 +196 190 6 65 45 34 15 14 11 +197 190 6 66 48 39 16 14 10 +198 190 6 67 47 36 15 15 11 +199 190 6 64 45 35 15 14 14 +200 190 6 65 44 34 15 14 11 +201 190 6 66 45 36 15 13 13 +202 190 6 66 46 35 15 12 10 +203 190 6 64 46 36 14 13 11 +204 190 6 66 46 33 15 14 12 +193 191 6 68 47 36 15 15 11 +194 191 6 67 47 35 15 14 13 +195 191 6 68 46 37 14 13 11 +196 191 6 65 46 33 14 15 13 +197 191 6 66 46 39 15 14 12 +198 191 6 66 47 34 16 14 11 +199 191 6 66 45 36 15 13 12 +200 191 6 66 45 35 14 13 13 +201 191 6 66 46 35 14 12 11 +202 191 6 65 46 37 14 13 12 +203 191 6 65 46 34 15 15 11 +204 191 6 66 46 31 15 14 11 +193 192 6 66 47 36 15 14 12 +194 192 6 68 46 36 14 14 12 +195 192 6 68 46 37 14 13 11 +196 192 6 66 46 36 15 15 14 +197 192 6 68 48 39 15 13 12 +198 192 6 66 45 34 15 14 11 +199 192 6 67 46 34 15 13 12 +200 192 6 66 45 36 13 14 13 +201 192 6 68 45 35 14 12 10 +202 192 6 66 45 37 15 13 12 +203 192 6 66 45 33 15 14 12 +204 192 6 67 45 33 15 13 13 +194 193 6 66 46 35 15 14 13 +195 193 6 65 46 37 16 14 13 +196 193 6 67 47 37 14 15 11 +197 193 6 67 45 36 14 13 11 +198 193 6 64 47 33 15 14 14 +199 193 6 64 47 33 15 14 14 +200 193 6 67 44 37 14 15 11 +201 193 6 66 45 35 15 12 12 +202 193 6 66 46 35 14 12 13 +203 193 6 65 45 35 14 13 13 +204 193 6 66 45 35 15 14 12 +194 194 6 67 48 34 14 14 11 +195 194 6 67 45 36 16 13 11 +196 194 6 66 45 34 15 16 12 +197 194 6 67 47 37 15 13 12 +198 194 6 65 45 35 15 14 13 +199 194 6 65 45 34 15 14 11 +200 194 6 66 46 35 15 13 13 +201 194 6 67 44 36 15 
14 14 +202 194 6 67 46 34 14 13 11 +203 194 6 66 46 35 15 13 12 +204 194 6 65 45 34 15 14 11 +194 195 6 67 46 35 15 15 12 +195 195 6 66 46 38 15 13 10 +196 195 6 68 47 34 15 15 12 +197 195 6 66 46 36 14 13 12 +198 195 6 65 46 33 15 15 10 +199 195 6 66 45 36 15 12 11 +200 195 6 66 45 33 14 14 15 +201 195 6 65 45 36 14 13 12 +202 195 6 66 45 35 15 14 10 +203 195 6 65 44 35 15 14 12 +204 195 6 66 47 36 14 14 13 +195 196 6 65 44 35 16 15 13 +196 196 6 67 45 37 15 14 13 +197 196 6 67 46 35 15 14 12 +198 196 6 66 46 34 15 15 11 +199 196 6 65 47 36 15 14 12 +200 196 6 67 45 35 15 14 13 +201 196 6 65 45 36 14 13 12 +202 196 6 64 45 35 15 15 11 +203 196 6 66 46 34 15 15 13 +294 227 4 86 73 78 55 73 53 +295 227 4 83 69 71 64 83 60 +296 227 4 86 71 70 69 85 62 +297 227 4 85 70 70 66 90 67 +298 227 4 89 74 77 64 80 60 +289 228 4 80 69 74 70 107 63 +290 228 4 80 71 79 64 107 73 +291 228 4 88 74 84 61 102 78 +292 228 4 95 82 93 56 90 70 +293 228 4 89 76 85 64 89 62 +294 228 4 83 70 71 73 90 61 +295 228 4 83 70 71 73 90 61 +296 228 4 86 71 73 73 90 62 +297 228 4 88 73 78 73 99 72 +298 228 4 92 78 83 78 102 75 +290 229 4 84 71 76 59 76 54 +291 229 4 87 75 84 62 102 69 +292 229 4 94 82 92 57 98 82 +293 229 4 92 82 93 64 102 76 +294 229 4 90 80 88 68 98 69 +295 229 4 93 81 88 68 98 72 +296 229 4 93 79 88 72 107 77 +297 229 4 98 84 96 66 110 84 +298 229 4 94 81 89 58 98 79 +290 230 4 79 63 66 62 83 60 +291 230 4 76 63 66 74 101 61 +292 230 4 89 74 81 61 98 73 +293 230 4 82 69 74 57 84 65 +294 230 4 85 70 75 52 77 58 +295 230 4 91 71 71 48 69 55 +296 230 4 86 68 69 53 81 58 +297 230 4 96 82 93 61 102 78 +298 230 4 100 90 102 62 95 80 +290 231 4 86 71 80 61 115 79 +291 231 4 75 60 60 74 89 53 +292 231 4 83 67 67 65 88 56 +133 244 4 69 56 44 71 72 38 +134 244 4 68 54 43 73 71 38 +135 244 4 73 59 53 80 82 45 +136 244 4 73 59 56 84 85 45 +137 244 4 69 61 50 83 92 48 +138 244 4 87 76 77 91 109 70 +139 244 4 93 82 79 113 113 72 +140 244 4 75 59 60 84 90 47 +133 245 4 73 60 53 77 93 51 +134 
245 4 72 59 52 84 90 47 +135 245 4 76 66 58 87 104 58 +136 245 4 76 66 67 88 109 63 +137 245 4 80 67 67 82 116 67 +138 245 4 77 68 63 90 114 66 +139 245 4 88 81 73 107 115 79 +140 245 4 75 64 56 94 92 49 +133 246 4 73 60 53 77 93 51 +134 246 4 77 64 60 85 106 60 +135 246 4 77 68 66 85 112 65 +136 246 4 79 68 70 87 119 65 +137 246 4 81 72 74 85 128 77 +138 246 4 77 65 65 86 104 55 +139 246 4 81 68 64 81 83 51 +140 246 4 81 66 61 100 75 43 +134 247 4 80 71 73 83 123 73 +135 247 4 78 67 67 78 112 64 +136 247 4 77 64 62 86 108 59 +137 247 4 79 70 70 89 122 70 +138 247 4 80 65 66 80 114 68 +139 247 4 79 64 61 79 81 50 +140 247 4 75 64 59 90 79 43 +141 247 4 70 58 47 101 78 37 +250 247 4 69 55 48 63 68 38 +251 247 4 77 67 71 70 98 61 +134 248 4 79 68 68 79 118 70 +135 248 4 78 62 66 75 106 64 +136 248 4 86 74 78 74 110 72 +137 248 4 87 74 79 69 110 71 +138 248 4 82 71 71 76 106 69 +139 248 4 81 70 67 92 91 50 +140 248 4 81 72 67 91 100 56 +141 248 4 80 67 62 87 89 55 +250 248 4 80 63 60 64 81 52 +251 248 4 87 76 78 72 105 76 +252 248 4 85 76 75 89 109 77 +253 248 4 88 78 79 83 116 78 +254 248 4 89 80 89 73 103 77 +134 249 4 88 76 79 66 110 74 +135 249 4 84 66 74 64 96 68 +136 249 4 83 75 75 78 104 72 +137 249 4 86 74 78 77 90 63 +138 249 4 81 73 72 69 98 62 +139 249 4 80 70 67 91 108 68 +140 249 4 90 73 75 88 105 67 +141 249 4 86 69 65 77 84 50 +251 249 4 84 73 71 95 93 61 +252 249 4 76 61 49 107 84 41 +253 249 4 85 75 74 97 125 74 +254 249 4 83 78 85 87 133 83 +251 250 4 90 81 88 85 126 85 +252 250 4 82 70 65 90 77 47 +253 250 4 90 78 79 84 108 71 +254 250 4 85 79 84 90 134 86 +252 251 4 86 80 82 86 116 82 +253 251 4 84 78 77 98 112 73 +254 251 4 83 75 76 111 121 69 +378 253 6 74 59 55 60 100 61 +377 254 6 75 61 62 68 108 61 +378 254 6 74 59 57 49 71 48 +379 254 6 69 54 45 14 11 12 +376 255 6 70 55 49 66 78 44 +377 255 6 75 61 62 68 108 61 +378 255 6 74 60 57 42 82 53 +379 255 6 72 54 46 16 18 16 +376 256 6 76 61 59 68 106 62 +377 256 6 74 59 52 30 53 36 +378 256 6 69 
54 44 14 6 7 +376 257 6 76 61 59 65 93 56 +377 257 6 73 57 52 23 44 30 +132 274 4 72 58 59 62 94 59 +132 275 4 74 58 58 64 97 58 +133 275 4 74 63 65 72 114 64 +134 275 4 74 63 65 72 114 64 +135 275 4 75 58 60 59 82 52 +136 275 4 73 55 51 54 49 31 +137 275 4 64 43 34 23 16 14 +132 276 4 72 57 54 60 97 56 +133 276 4 75 65 68 77 123 70 +134 276 4 73 53 55 49 61 37 +135 276 4 61 40 30 12 10 11 +136 276 4 64 42 34 19 24 20 +137 276 4 68 47 42 42 63 37 +132 277 4 71 55 51 61 91 52 +133 277 4 74 63 61 80 114 63 +134 277 4 74 59 55 67 74 41 +135 277 4 69 50 40 40 35 24 +136 277 4 64 44 35 18 41 30 +137 277 4 71 54 52 53 96 52 +132 278 4 71 56 50 65 75 44 +133 278 4 73 60 58 75 99 54 +134 278 4 79 64 64 79 122 69 +135 278 4 77 63 59 75 109 61 +136 278 4 70 54 46 43 57 37 +137 278 4 76 62 62 69 95 55 +132 279 4 73 58 54 63 85 52 +133 279 4 73 57 56 66 94 55 +134 279 4 76 61 61 71 104 62 +135 279 4 77 63 67 73 109 62 +136 279 4 76 62 60 68 106 63 +137 279 4 75 63 59 82 116 66 +360 290 5 68 48 46 67 109 60 +361 290 5 73 56 59 64 129 79 +362 290 5 78 60 70 66 130 82 +363 290 5 78 62 65 64 120 71 +364 290 5 76 60 61 66 110 68 +365 290 5 75 57 61 64 116 70 +366 290 5 76 63 67 63 124 75 +367 290 5 78 60 63 59 118 74 +368 290 5 76 59 68 62 125 77 +369 290 5 76 60 64 63 117 73 +370 290 5 75 60 63 66 121 75 +371 290 5 72 58 57 68 105 64 +360 291 5 85 72 82 71 127 86 +361 291 5 88 75 84 69 130 87 +362 291 5 78 62 72 65 137 86 +363 291 5 80 64 68 64 124 78 +364 291 5 72 57 58 67 110 65 +365 291 5 77 60 68 63 124 78 +366 291 5 77 60 65 60 125 80 +367 291 5 78 63 66 61 123 74 +368 291 5 75 60 64 62 127 77 +369 291 5 75 62 67 61 126 79 +370 291 5 74 60 61 68 117 70 +371 291 5 72 56 50 73 96 52 +361 292 5 80 62 70 64 127 82 +362 292 5 77 62 66 64 119 75 +363 292 5 78 61 67 63 121 75 +364 292 5 74 59 65 63 117 73 +365 292 5 78 62 69 63 123 77 +366 292 5 76 60 65 62 122 76 +367 292 5 78 62 70 63 132 79 +368 292 5 75 60 69 64 127 79 +369 292 5 77 63 71 61 130 85 +370 292 5 74 59 56 64 102 61 
+371 292 5 70 53 47 66 90 45 +361 293 5 73 58 62 64 119 82 +362 293 5 74 60 64 65 119 74 +363 293 5 77 59 65 63 124 75 +364 293 5 79 62 66 63 123 77 +365 293 5 77 60 68 64 125 80 +366 293 5 77 62 70 63 130 80 +367 293 5 80 62 75 63 133 85 +368 293 5 77 61 70 63 128 82 +369 293 5 74 55 54 59 90 54 +370 293 5 73 57 52 60 89 51 +371 293 5 76 57 62 63 120 73 +361 294 5 69 54 53 61 96 62 +362 294 5 76 60 63 67 114 69 +363 294 5 76 61 66 63 123 74 +364 294 5 79 62 70 65 131 81 +365 294 5 77 60 66 63 122 76 +366 294 5 75 61 66 62 124 76 +367 294 5 77 61 67 61 120 70 +368 294 5 77 59 66 63 116 71 +369 294 5 73 56 53 63 92 51 +370 294 5 76 58 60 63 112 67 +371 294 5 79 60 68 63 129 80 +361 295 5 73 58 59 61 85 51 +362 295 5 74 58 58 70 103 58 +363 295 5 77 61 63 65 117 72 +364 295 5 79 63 72 64 132 83 +365 295 5 76 59 63 61 122 73 +366 295 5 73 58 60 60 109 65 +367 295 5 76 59 55 61 96 56 +368 295 5 76 56 58 60 98 60 +369 295 5 71 57 54 67 92 52 +370 295 5 74 56 60 64 108 63 +371 295 5 77 57 61 60 108 66 +361 296 5 76 58 63 69 104 60 +362 296 5 79 63 67 61 127 80 +363 296 5 75 58 67 59 122 78 +364 296 5 75 56 59 59 105 62 +365 296 5 73 57 55 61 88 51 +366 296 5 71 58 55 68 99 56 +367 296 5 74 56 53 61 87 51 +368 296 5 71 56 55 66 90 53 +369 296 5 73 55 54 64 92 53 +370 296 5 72 54 50 63 84 51 +371 296 5 70 57 49 68 89 50 +361 297 5 74 57 64 67 113 67 +362 297 5 76 63 68 61 126 82 +363 297 5 76 59 65 58 115 71 +364 297 5 74 59 59 60 99 61 +365 297 5 73 59 54 67 99 55 +366 297 5 72 56 54 72 107 57 +367 297 5 73 59 55 69 100 55 +368 297 5 73 59 54 67 97 56 +369 297 5 73 58 53 69 93 54 +370 297 5 73 56 52 72 85 48 +371 297 5 72 58 54 70 94 52 +361 298 5 78 61 68 60 116 74 +362 298 5 77 60 61 60 111 69 +363 298 5 74 59 62 64 110 64 +364 298 5 76 60 65 63 110 68 +365 298 5 71 58 54 65 106 62 +366 298 5 71 54 54 65 95 52 +367 298 5 71 55 51 71 101 55 +368 298 5 73 57 51 66 96 55 +369 298 5 72 57 53 67 98 57 +370 298 5 75 59 59 68 105 60 +371 298 5 76 60 64 63 115 69 +362 299 5 75 
58 64 60 112 72 +363 299 5 75 57 59 62 112 67 +364 299 5 72 55 57 65 101 60 +365 299 5 71 56 51 66 91 52 +366 299 5 72 55 52 65 94 55 +367 299 5 71 57 50 65 95 53 +368 299 5 71 57 50 65 95 53 +369 299 5 72 55 56 66 102 59 +370 299 5 76 60 63 60 124 77 +371 299 5 81 65 70 53 130 85 +362 300 5 74 58 64 59 115 74 +363 300 5 74 57 59 60 113 70 +364 300 5 74 59 58 64 112 66 +365 300 5 74 59 61 65 106 63 +366 300 5 76 58 56 65 105 61 +367 300 5 72 56 54 66 103 60 +368 300 5 75 57 58 61 111 65 +369 300 5 76 59 65 55 117 79 +370 300 5 84 72 81 56 130 94 +371 300 5 85 72 82 59 128 96 +362 301 5 74 57 65 60 118 74 +363 301 5 73 57 59 63 107 64 +364 301 5 76 58 60 64 118 71 +365 301 5 77 60 65 66 123 75 +366 301 5 73 56 57 62 106 63 +367 301 5 75 59 59 62 111 67 +368 301 5 71 56 57 58 104 63 +369 301 5 81 64 69 56 106 77 +370 301 5 89 75 84 57 118 95 +371 301 5 77 66 68 59 112 73 +362 302 5 75 58 62 62 116 70 +363 302 5 75 58 56 68 107 61 +364 302 5 76 60 65 67 118 72 +365 302 5 76 59 65 66 121 76 +366 302 5 73 56 57 62 106 63 +367 302 5 70 56 58 59 93 54 +368 302 5 75 63 62 57 102 69 +369 302 5 86 76 86 59 117 94 +370 302 5 83 70 80 59 113 84 +371 302 5 73 57 58 59 100 58 +363 303 5 72 56 52 68 99 54 +364 303 5 74 58 64 68 115 70 +365 303 5 74 61 59 66 109 66 +366 303 5 71 58 60 60 97 59 +367 303 5 69 59 59 57 92 53 +368 303 5 83 71 79 59 115 83 +369 303 5 87 74 84 61 124 94 +370 303 5 78 59 65 58 106 64 +371 303 5 73 54 56 57 92 58 +345 307 7 72 59 50 66 73 41 +346 307 7 83 71 70 70 88 58 +347 307 7 112 103 117 65 132 115 +348 307 7 125 118 135 65 138 133 +349 307 7 125 112 129 64 126 118 +350 307 7 121 108 122 59 124 117 +351 307 7 125 113 126 62 131 129 +345 308 7 72 60 47 69 82 46 +346 308 7 79 66 61 77 85 52 +347 308 7 116 105 115 61 112 102 +348 308 7 123 113 126 65 127 125 +349 308 7 125 113 126 63 116 114 +350 308 7 118 108 121 63 107 103 +351 308 7 132 125 148 71 130 124 +198 309 5 69 53 54 62 82 49 +199 309 5 72 54 57 56 94 58 +200 309 5 72 56 57 65 98 60 +201 309 
5 74 59 60 70 107 65 +202 309 5 75 59 60 68 116 70 +203 309 5 71 59 54 69 112 68 +204 309 5 74 58 63 62 101 63 +205 309 5 77 59 68 63 111 68 +206 309 5 76 56 62 57 117 71 +207 309 5 73 58 59 61 118 71 +208 309 5 75 61 67 62 127 82 +209 309 5 80 65 70 63 122 84 +345 309 7 74 58 52 72 83 47 +346 309 7 92 79 84 64 95 72 +347 309 7 124 108 124 60 124 117 +348 309 7 138 123 135 64 142 137 +349 309 7 138 123 135 64 142 137 +350 309 7 146 135 148 71 145 139 +351 309 7 143 132 150 73 142 145 +198 310 5 68 51 50 60 73 42 +199 310 5 69 53 50 63 74 44 +200 310 5 71 56 53 66 86 49 +201 310 5 74 58 57 71 104 62 +202 310 5 74 57 57 69 106 65 +203 310 5 71 57 53 66 96 55 +204 310 5 75 60 68 64 112 71 +205 310 5 77 63 68 63 120 75 +206 310 5 71 53 49 53 80 48 +207 310 5 71 54 54 61 94 57 +208 310 5 74 58 62 65 110 69 +209 310 5 78 60 66 64 109 70 +345 310 7 72 56 47 70 74 41 +346 310 7 79 68 68 73 96 58 +347 310 7 113 101 113 60 117 113 +348 310 7 149 137 151 70 165 159 +349 310 7 170 157 176 80 182 183 +350 310 7 144 132 151 72 150 162 +351 310 7 147 138 164 79 145 145 +199 311 5 71 55 56 63 85 49 +200 311 5 73 56 56 65 85 51 +201 311 5 74 58 58 70 100 60 +202 311 5 73 57 56 68 100 58 +203 311 5 73 59 59 71 102 56 +204 311 5 77 62 68 68 121 75 +205 311 5 75 55 57 53 98 62 +206 311 5 69 52 44 52 60 36 +207 311 5 71 53 49 61 76 43 +208 311 5 74 61 64 68 120 72 +209 311 5 76 62 65 66 109 67 +345 311 7 68 52 43 62 59 35 +346 311 7 72 58 53 64 75 43 +347 311 7 97 87 102 62 111 89 +348 311 7 132 125 148 71 151 143 +349 311 7 151 140 166 77 161 161 +350 311 7 139 132 155 75 139 138 +351 311 7 153 143 170 82 152 150 +199 312 5 71 56 56 58 91 54 +200 312 5 71 56 53 60 85 54 +201 312 5 71 55 53 64 92 53 +202 312 5 73 58 58 70 98 55 +203 312 5 73 57 56 70 109 62 +204 312 5 74 56 59 57 108 69 +205 312 5 71 53 51 50 72 44 +206 312 5 70 52 46 55 63 38 +207 312 5 71 53 47 58 73 43 +208 312 5 75 59 61 65 117 69 +209 312 5 75 62 67 65 125 78 +345 312 7 69 50 43 58 48 30 +346 312 7 72 55 45 64 56 
32 +347 312 7 80 64 70 65 94 54 +348 312 7 102 95 111 69 122 103 +349 312 7 123 114 131 69 130 123 +350 312 7 129 117 130 63 119 113 +351 312 7 128 115 130 64 126 121 +199 313 5 71 57 51 64 79 47 +200 313 5 71 58 55 65 87 51 +201 313 5 75 60 60 68 99 58 +202 313 5 71 57 55 66 94 54 +203 313 5 70 56 56 64 99 58 +204 313 5 71 51 49 49 73 47 +205 313 5 70 52 48 58 65 37 +206 313 5 70 53 48 58 70 40 +207 313 5 67 52 46 54 62 38 +208 313 5 72 56 53 61 95 55 +209 313 5 74 59 60 66 119 71 +345 313 7 66 49 42 57 48 27 +346 313 7 72 55 47 69 72 41 +347 313 7 66 54 45 66 64 32 +348 313 7 82 71 79 71 89 59 +349 313 7 117 106 114 64 111 108 +350 313 7 117 104 109 60 104 100 +351 313 7 104 89 95 58 84 70 +199 314 5 73 58 56 64 86 51 +200 314 5 74 59 59 61 96 61 +201 314 5 75 59 63 61 105 65 +202 314 5 72 56 53 54 82 48 +203 314 5 68 52 49 52 60 37 +204 314 5 69 54 46 59 64 37 +205 314 5 67 51 46 58 66 36 +206 314 5 69 51 44 58 62 35 +207 314 5 69 53 46 61 65 37 +208 314 5 72 57 54 62 94 53 +209 314 5 71 54 51 54 92 56 +199 315 5 72 59 59 62 98 59 +200 315 5 77 60 65 60 112 70 +201 315 5 77 59 69 58 111 69 +202 315 5 74 57 59 54 85 51 +203 315 5 73 54 54 56 80 48 +204 315 5 68 56 47 61 70 40 +205 315 5 67 53 44 62 64 35 +206 315 5 70 51 45 62 70 37 +207 315 5 70 50 45 62 69 38 +208 315 5 68 56 50 64 74 42 +209 315 5 71 53 49 60 74 44 +199 316 5 76 63 70 67 116 69 +200 316 5 80 63 76 61 126 76 +201 316 5 77 59 69 58 111 69 +202 316 5 77 61 70 60 112 70 +203 316 5 74 57 61 61 102 61 +204 316 5 69 54 48 62 79 44 +205 316 5 68 51 42 59 62 34 +206 316 5 67 52 43 61 60 33 +207 316 5 69 53 43 65 65 35 +208 316 5 69 52 47 63 66 37 +209 316 5 70 53 47 64 71 40 +199 317 5 79 64 75 68 130 83 +200 317 5 77 63 70 60 121 74 +201 317 5 77 62 72 61 118 73 +202 317 5 78 62 68 64 120 74 +203 317 5 73 58 62 63 105 63 +204 317 5 72 54 52 61 79 48 +205 317 5 69 50 43 59 62 35 +206 317 5 66 52 43 63 63 36 +207 317 5 67 52 43 65 68 38 +208 317 5 70 52 43 67 69 38 +209 317 5 69 54 44 67 71 38 +240 317 
3 75 67 72 89 101 64 +241 317 3 77 70 78 89 109 67 +242 317 3 88 84 101 85 136 102 +243 317 3 90 81 89 94 133 105 +244 317 3 86 80 90 94 134 98 +245 317 3 78 74 74 106 118 77 +246 317 3 91 84 98 93 135 101 +247 317 3 86 80 89 89 120 85 +248 317 3 80 74 75 91 109 73 +274 317 3 73 63 56 105 100 51 +275 317 3 74 63 51 113 98 49 +276 317 3 74 63 50 117 100 49 +277 317 3 74 66 54 116 102 54 +278 317 3 75 66 53 110 103 51 +279 317 3 72 63 56 107 96 46 +280 317 3 74 64 56 110 98 49 +281 317 3 78 71 65 99 121 66 +282 317 3 80 71 66 99 122 69 +283 317 3 80 68 69 94 127 76 +284 317 3 80 73 69 103 123 68 +285 317 3 81 72 69 94 126 73 +286 317 3 80 75 77 84 131 79 +287 317 3 81 68 70 86 113 71 +288 317 3 76 65 63 98 108 70 +289 317 3 88 75 83 94 138 90 +199 318 5 75 58 65 60 119 72 +200 318 5 73 59 61 60 107 69 +201 318 5 74 61 66 64 115 70 +202 318 5 75 58 62 59 107 64 +203 318 5 75 61 63 62 104 62 +204 318 5 75 58 58 60 96 57 +205 318 5 73 53 52 59 79 46 +206 318 5 71 54 52 65 77 43 +207 318 5 70 54 52 67 80 47 +208 318 5 68 52 42 71 75 44 +209 318 5 66 54 44 68 73 37 +240 318 3 83 77 87 90 121 82 +241 318 3 81 73 75 92 117 75 +242 318 3 79 72 73 96 108 67 +243 318 3 77 71 74 100 109 67 +244 318 3 78 71 79 96 128 85 +245 318 3 81 75 83 94 120 77 +246 318 3 83 77 80 94 115 75 +247 318 3 95 93 108 81 130 106 +248 318 3 85 80 84 92 124 91 +249 318 3 72 60 52 102 96 47 +274 318 3 72 63 53 110 99 50 +275 318 3 73 63 52 112 98 48 +276 318 3 74 63 54 114 103 53 +277 318 3 76 66 56 116 107 57 +278 318 3 75 67 55 109 108 59 +279 318 3 72 62 53 106 98 53 +280 318 3 74 64 55 115 96 45 +281 318 3 74 69 61 114 111 57 +282 318 3 77 68 57 110 107 59 +283 318 3 78 69 66 102 123 66 +284 318 3 78 71 64 98 120 67 +285 318 3 78 68 68 94 122 73 +286 318 3 79 70 64 98 118 65 +287 318 3 74 62 55 82 101 55 +288 318 3 105 96 110 79 139 116 +289 318 3 88 75 83 94 138 90 +240 319 3 91 89 103 82 137 108 +241 319 3 93 89 106 79 136 106 +242 319 3 91 84 100 85 128 98 +243 319 3 88 84 105 85 133 102 +244 
319 3 90 86 111 81 147 110 +245 319 3 88 83 92 88 133 94 +246 319 3 94 91 105 83 136 109 +247 319 3 83 77 80 96 120 88 +248 319 3 71 62 51 101 103 61 +249 319 3 76 64 56 97 102 58 +275 319 3 73 61 50 108 100 52 +276 319 3 74 64 58 106 108 56 +277 319 3 76 64 55 111 112 55 +278 319 3 74 61 53 95 100 53 +279 319 3 70 57 47 80 84 47 +280 319 3 72 64 52 90 90 47 +281 319 3 74 66 58 104 106 53 +282 319 3 75 63 51 101 99 53 +283 319 3 77 67 58 110 105 51 +284 319 3 73 66 53 104 105 54 +285 319 3 73 63 58 107 96 49 +286 319 3 75 65 49 115 95 45 +287 319 3 69 57 39 101 83 34 +288 319 3 127 118 141 81 164 136 +289 319 3 109 99 113 89 152 118 +240 320 3 78 69 65 95 108 67 +241 320 3 79 75 78 90 116 81 +242 320 3 85 77 88 91 123 90 +243 320 3 81 74 81 97 122 86 +244 320 3 74 66 64 108 114 76 +245 320 3 88 85 96 89 137 105 +246 320 3 77 68 65 103 109 74 +247 320 3 75 61 54 104 95 48 +248 320 3 74 64 58 95 94 49 +249 320 3 75 63 57 100 92 51 +275 320 3 72 60 51 105 91 46 +276 320 3 74 66 60 96 113 61 +277 320 3 76 65 60 96 114 61 +278 320 3 74 59 57 76 98 54 +279 320 3 72 59 55 70 80 47 +280 320 3 74 64 55 79 92 56 +281 320 3 76 63 55 95 95 52 +282 320 3 74 64 55 103 108 60 +283 320 3 75 63 54 104 101 52 +284 320 3 74 67 51 112 107 56 +285 320 3 74 63 53 112 103 51 +286 320 3 71 63 48 116 99 49 +287 320 3 70 57 36 118 85 36 +288 320 3 107 98 114 94 145 117 +289 320 3 137 127 149 93 185 154 +348 320 7 116 111 127 70 125 110 +349 320 7 125 118 135 70 120 113 +350 320 7 137 132 154 74 137 131 +240 321 3 90 88 96 79 112 83 +241 321 3 86 77 83 87 104 69 +242 321 3 82 71 73 95 105 59 +243 321 3 75 68 65 104 101 52 +244 321 3 76 65 59 111 87 41 +245 321 3 73 62 55 114 104 59 +246 321 3 74 67 59 103 89 42 +247 321 3 78 69 65 89 96 50 +248 321 3 77 70 68 91 107 68 +249 321 3 77 69 62 99 95 58 +275 321 3 75 65 60 91 116 63 +276 321 3 75 65 60 91 116 63 +277 321 3 77 66 65 83 120 66 +278 321 3 77 62 61 81 103 59 +279 321 3 75 67 63 92 106 60 +280 321 3 77 69 68 91 116 66 +281 321 3 77 63 
58 92 96 54 +282 321 3 78 69 60 105 109 56 +283 321 3 75 64 53 105 110 56 +284 321 3 76 67 52 113 110 57 +285 321 3 75 66 56 112 110 55 +286 321 3 77 68 58 114 107 57 +287 321 3 75 68 53 118 103 50 +288 321 3 81 72 69 105 111 65 +289 321 3 124 116 131 97 177 144 +348 321 7 130 119 138 67 127 122 +349 321 7 132 116 128 65 119 117 +350 321 7 130 120 137 63 122 119 +240 322 3 82 77 78 88 117 91 +241 322 3 86 75 83 89 120 92 +242 322 3 89 78 89 88 124 94 +243 322 3 82 78 80 88 113 80 +244 322 3 73 61 54 85 86 49 +245 322 3 79 69 61 107 89 46 +246 322 3 81 76 82 92 113 76 +247 322 3 90 87 102 80 129 96 +248 322 3 89 83 94 78 123 95 +249 322 3 81 74 80 89 109 77 +275 322 3 72 63 55 89 103 56 +276 322 3 65 53 44 94 91 45 +277 322 3 86 74 74 97 113 68 +278 322 3 109 105 115 105 131 95 +279 322 3 97 93 102 97 128 85 +280 322 3 77 66 64 82 108 62 +281 322 3 90 79 81 89 121 89 +282 322 3 90 84 90 95 135 94 +283 322 3 83 73 69 101 114 62 +284 322 3 83 75 73 99 117 74 +285 322 3 87 79 76 100 127 85 +286 322 3 85 77 77 99 120 79 +287 322 3 80 74 66 100 119 78 +288 322 3 80 74 66 100 119 78 +289 322 3 82 76 71 101 116 68 +348 322 7 116 101 112 54 99 97 +349 322 7 124 110 123 59 113 109 +350 322 7 109 93 95 54 85 87 +240 323 3 85 75 83 85 107 70 +241 323 3 89 81 93 80 120 93 +242 323 3 99 97 118 77 136 121 +243 323 3 96 89 103 75 110 93 +244 323 3 75 59 57 68 72 45 +245 323 3 74 60 58 87 87 52 +246 323 3 87 84 98 83 128 98 +247 323 3 97 97 118 74 144 125 +248 323 3 90 86 95 78 122 99 +249 323 3 79 69 72 97 101 65 +250 323 3 81 74 81 94 123 88 +276 323 3 76 65 60 94 78 30 +277 323 3 95 89 92 93 126 91 +278 323 3 154 156 182 109 215 199 +279 323 3 132 129 145 114 205 174 +280 323 3 79 69 73 83 120 80 +281 323 3 95 88 99 82 152 109 +282 323 3 85 73 81 88 139 88 +283 323 3 89 84 91 88 137 96 +284 323 3 90 79 87 87 146 97 +285 323 3 88 80 84 85 144 94 +286 323 3 87 76 82 82 135 87 +287 323 3 86 79 87 84 131 88 +288 323 3 89 81 87 92 127 87 +289 323 3 79 72 67 103 108 61 +240 324 3 91 
88 108 73 131 109 +241 324 3 101 102 130 73 140 122 +242 324 3 102 95 113 68 115 100 +243 324 3 76 61 60 66 73 46 +244 324 3 72 59 55 72 85 53 +245 324 3 88 86 104 77 127 97 +246 324 3 99 98 121 74 136 118 +247 324 3 90 83 89 84 113 88 +248 324 3 90 83 89 84 113 88 +249 324 3 73 62 55 109 91 54 +250 324 3 69 56 51 119 86 43 +276 324 3 76 65 60 94 78 30 +277 324 3 90 84 89 85 113 80 +278 324 3 130 130 147 101 209 196 +279 324 3 120 119 129 113 209 189 +280 324 3 79 70 74 77 136 86 +281 324 3 89 78 86 78 178 115 +282 324 3 86 73 77 77 151 92 +283 324 3 87 78 80 87 155 102 +284 324 3 83 72 76 86 134 86 +285 324 3 85 72 75 79 130 82 +286 324 3 82 70 75 84 128 80 +287 324 3 85 74 78 86 144 93 +288 324 3 83 71 71 96 132 86 +289 324 3 88 78 78 99 126 84 +240 325 3 90 86 116 75 134 108 +241 325 3 95 98 127 73 135 116 +242 325 3 98 92 104 70 114 95 +243 325 3 73 59 54 73 77 47 +244 325 3 77 68 71 76 98 66 +245 325 3 92 90 110 71 133 105 +246 325 3 96 94 116 76 130 105 +247 325 3 87 78 80 84 104 76 +248 325 3 81 71 72 86 103 70 +249 325 3 77 64 62 106 97 60 +250 325 3 72 59 44 122 86 42 +276 325 3 104 97 101 88 115 85 +277 325 3 109 103 109 91 120 83 +278 325 3 106 102 117 101 154 122 +279 325 3 94 88 87 97 156 121 +280 325 3 83 68 66 69 128 82 +281 325 3 79 65 61 55 95 59 +282 325 3 76 64 58 60 74 47 +283 325 3 80 69 70 84 133 83 +284 325 3 78 65 63 81 124 74 +285 325 3 81 66 64 80 125 75 +286 325 3 81 70 69 88 122 74 +287 325 3 82 72 73 89 120 71 +288 325 3 80 68 67 100 114 67 +289 325 3 78 62 57 107 108 65 +240 326 3 92 87 113 73 129 104 +241 326 3 97 96 124 71 131 116 +242 326 3 92 83 94 67 103 83 +243 326 3 72 56 50 69 69 40 +244 326 3 74 63 63 78 92 62 +245 326 3 94 92 109 74 135 109 +246 326 3 83 77 81 91 113 81 +247 326 3 73 61 52 102 89 51 +248 326 3 83 72 78 83 105 74 +249 326 3 92 85 99 73 119 93 +250 326 3 89 83 91 82 121 89 +276 326 3 133 129 145 87 160 140 +277 326 3 131 125 144 85 165 147 +278 326 3 104 93 102 91 143 118 +279 326 3 87 74 71 78 106 72 +280 326 
3 74 58 50 35 50 28 +281 326 3 71 54 47 20 25 20 +282 326 3 75 65 59 61 57 35 +283 326 3 76 63 58 84 98 56 +284 326 3 76 61 59 91 106 60 +285 326 3 78 67 62 93 113 66 +286 326 3 81 72 74 88 130 80 +287 326 3 83 73 77 84 137 83 +288 326 3 82 72 75 87 125 72 +289 326 3 80 66 63 102 118 66 +389 326 1 116 107 128 68 151 128 +390 326 1 104 91 101 62 111 85 +391 326 1 104 91 101 62 111 85 +392 326 1 119 104 112 59 107 92 +393 326 1 113 95 110 52 87 80 +394 326 1 95 76 82 47 65 53 +395 326 1 86 70 70 67 83 53 +396 326 1 84 70 75 73 100 68 +397 326 1 81 69 68 63 82 53 +398 326 1 72 58 51 73 70 42 +399 326 1 69 56 46 84 77 35 +240 327 3 95 87 107 71 120 102 +241 327 3 102 97 119 72 127 109 +242 327 3 86 75 84 61 98 79 +243 327 3 68 54 47 59 68 39 +244 327 3 68 56 48 83 77 40 +245 327 3 84 78 87 85 112 83 +246 327 3 78 70 65 96 101 61 +247 327 3 69 54 38 115 85 39 +248 327 3 69 55 40 120 84 39 +249 327 3 77 68 63 105 98 58 +250 327 3 82 74 76 92 106 73 +277 327 3 122 115 122 97 156 133 +278 327 3 99 92 95 96 142 113 +279 327 3 81 71 65 69 93 65 +280 327 3 70 53 48 28 15 11 +281 327 3 71 55 49 34 12 9 +282 327 3 75 65 59 61 57 35 +283 327 3 78 66 61 91 107 58 +284 327 3 74 62 55 98 103 53 +285 327 3 76 66 59 95 111 58 +286 327 3 81 68 66 94 120 65 +287 327 3 80 69 65 87 115 71 +288 327 3 82 73 77 83 123 83 +289 327 3 80 70 65 94 114 64 +380 327 1 157 147 171 93 161 142 +381 327 1 119 100 109 57 94 95 +382 327 1 97 74 77 42 82 78 +383 327 1 111 96 103 57 88 84 +384 327 1 108 89 100 54 86 78 +385 327 1 105 84 91 47 77 73 +386 327 1 116 95 104 58 81 79 +387 327 1 147 137 161 89 165 145 +388 327 1 140 137 163 86 188 160 +389 327 1 106 95 109 57 110 95 +390 327 1 78 58 63 46 80 57 +391 327 1 93 77 78 60 83 56 +392 327 1 97 84 95 66 96 75 +393 327 1 91 74 78 63 89 68 +394 327 1 88 74 76 65 80 58 +395 327 1 85 72 76 65 99 71 +396 327 1 81 66 65 75 88 57 +397 327 1 73 63 56 80 88 49 +398 327 1 69 54 44 85 72 37 +399 327 1 69 56 41 98 80 39 +240 328 3 92 83 100 70 114 92 +241 328 3 
104 99 122 71 125 108 +242 328 3 86 75 85 55 88 71 +243 328 3 67 52 44 58 65 38 +244 328 3 73 62 53 88 79 39 +245 328 3 84 77 81 88 114 82 +246 328 3 90 86 99 78 128 100 +247 328 3 82 74 82 89 106 69 +248 328 3 73 61 51 114 86 41 +249 328 3 67 54 32 129 83 39 +250 328 3 67 53 37 126 85 39 +277 328 3 85 76 72 98 126 86 +278 328 3 81 70 65 91 113 67 +279 328 3 79 66 65 64 79 51 +280 328 3 77 64 56 52 57 38 +281 328 3 75 62 58 76 80 43 +282 328 3 79 68 63 91 118 69 +283 328 3 73 65 59 95 103 50 +284 328 3 82 69 63 94 97 50 +285 328 3 82 69 63 94 97 50 +286 328 3 87 78 82 93 113 70 +287 328 3 90 77 80 82 114 80 +288 328 3 89 82 87 74 102 81 +289 328 3 82 72 71 100 118 75 +380 328 1 119 99 107 63 109 105 +381 328 1 104 81 85 46 76 72 +382 328 1 101 83 87 46 83 77 +383 328 1 98 81 85 49 87 77 +384 328 1 105 85 94 51 88 82 +385 328 1 107 87 96 49 90 86 +386 328 1 109 90 96 48 87 80 +387 328 1 110 90 101 55 94 89 +388 328 1 118 107 124 70 112 97 +389 328 1 135 131 152 79 111 88 +390 328 1 107 91 101 58 70 60 +391 328 1 100 86 94 59 73 62 +392 328 1 89 70 72 55 79 65 +393 328 1 88 74 73 71 91 64 +394 328 1 94 80 85 65 88 60 +395 328 1 90 76 83 67 99 70 +396 328 1 80 67 62 85 97 58 +397 328 1 84 77 73 86 111 74 +398 328 1 81 66 62 79 80 53 +399 328 1 74 58 49 94 82 45 +400 328 1 81 68 60 82 87 51 +240 329 3 84 75 75 75 100 76 +241 329 3 95 86 98 76 125 104 +242 329 3 79 67 69 64 73 63 +243 329 3 68 55 48 78 68 38 +244 329 3 74 62 50 101 84 46 +245 329 3 87 80 85 89 113 83 +246 329 3 91 88 108 75 129 105 +247 329 3 90 86 105 79 135 103 +248 329 3 83 75 76 94 122 81 +249 329 3 72 62 49 113 98 48 +250 329 3 71 59 44 118 92 47 +251 329 3 69 55 38 121 84 40 +380 329 1 92 74 76 62 82 74 +381 329 1 100 82 86 52 78 72 +382 329 1 101 83 90 52 84 78 +383 329 1 104 88 95 52 89 81 +384 329 1 103 86 94 55 92 86 +385 329 1 104 86 94 53 93 87 +386 329 1 107 91 99 53 92 88 +387 329 1 96 78 83 51 79 79 +388 329 1 119 107 123 70 103 82 +389 329 1 135 131 152 79 111 88 +390 329 1 139 131 158 
79 128 90 +391 329 1 113 105 117 57 105 77 +392 329 1 87 64 64 44 58 54 +393 329 1 94 78 79 66 84 67 +394 329 1 98 81 89 56 86 67 +395 329 1 95 81 87 60 91 80 +396 329 1 85 75 74 78 113 69 +397 329 1 96 84 89 73 111 79 +398 329 1 93 79 81 64 98 70 +399 329 1 87 73 73 61 85 60 +400 329 1 87 71 68 68 80 60 +240 330 3 73 63 55 104 97 58 +241 330 3 71 58 46 111 85 46 +242 330 3 71 59 51 103 85 43 +243 330 3 72 59 49 107 86 43 +244 330 3 74 62 50 101 84 46 +245 330 3 75 64 55 106 98 59 +246 330 3 76 64 62 103 108 65 +247 330 3 80 70 70 99 113 70 +248 330 3 80 73 70 96 112 67 +249 330 3 78 66 56 103 97 52 +250 330 3 71 59 45 113 84 43 +251 330 3 71 62 47 118 93 46 +380 330 1 87 73 74 72 84 64 +381 330 1 94 79 85 65 83 72 +382 330 1 98 81 87 64 88 76 +383 330 1 101 84 92 55 88 84 +384 330 1 99 84 91 59 95 81 +385 330 1 101 86 92 60 97 88 +386 330 1 99 83 90 59 98 91 +387 330 1 96 81 85 59 90 80 +388 330 1 92 76 80 55 90 73 +389 330 1 144 139 165 87 112 85 +390 330 1 107 96 114 65 96 77 +391 330 1 101 88 100 62 89 71 +392 330 1 95 79 82 60 79 66 +393 330 1 99 80 83 55 77 65 +394 330 1 98 79 87 50 74 66 +395 330 1 92 75 80 57 94 71 +396 330 1 96 81 91 67 95 76 +397 330 1 96 82 92 60 82 71 +398 330 1 95 77 81 51 79 63 +399 330 1 96 80 87 45 70 56 +400 330 1 87 71 76 58 73 51 +240 331 3 71 57 42 123 85 39 +241 331 3 68 56 40 131 84 37 +242 331 3 73 61 48 102 88 44 +243 331 3 71 58 48 100 84 41 +244 331 3 68 56 35 124 82 39 +245 331 3 67 54 34 122 80 38 +246 331 3 75 65 62 99 106 61 +247 331 3 85 76 81 84 123 80 +248 331 3 80 68 63 97 104 62 +249 331 3 70 59 44 115 83 41 +250 331 3 70 58 42 122 78 37 +251 331 3 70 60 43 125 84 38 +380 331 1 97 81 84 64 75 53 +381 331 1 98 80 87 64 90 72 +382 331 1 95 77 79 66 91 70 +383 331 1 98 81 85 63 94 83 +384 331 1 97 81 87 61 90 77 +385 331 1 94 79 83 60 90 73 +386 331 1 93 75 79 61 92 75 +387 331 1 95 76 80 58 90 76 +388 331 1 93 78 82 56 88 74 +389 331 1 86 69 73 53 91 68 +390 331 1 90 72 72 58 83 67 +391 331 1 94 75 78 59 81 68 +392 
331 1 100 80 83 59 83 69 +393 331 1 99 85 90 51 82 75 +394 331 1 98 80 86 64 86 70 +395 331 1 88 73 78 64 87 62 +396 331 1 92 78 81 61 103 72 +397 331 1 93 80 88 55 88 73 +398 331 1 93 72 80 46 71 58 +399 331 1 97 79 93 49 86 71 +400 331 1 92 76 82 52 72 58 +380 332 1 93 76 84 58 83 66 +381 332 1 87 71 73 55 71 49 +382 332 1 90 67 69 45 55 44 +383 332 1 93 75 79 51 76 63 +384 332 1 88 69 70 46 69 52 +385 332 1 88 67 68 44 67 52 +386 332 1 94 76 79 48 79 67 +387 332 1 94 76 79 48 79 67 +388 332 1 93 78 82 51 79 69 +389 332 1 95 80 85 58 79 69 +390 332 1 102 86 92 57 73 71 +391 332 1 100 84 93 61 81 76 +392 332 1 92 76 79 69 88 71 +393 332 1 90 76 80 67 84 69 +394 332 1 93 78 83 61 95 67 +395 332 1 88 73 78 64 87 62 +396 332 1 94 71 76 44 70 53 +397 332 1 94 75 80 45 77 64 +398 332 1 97 76 86 45 67 56 +399 332 1 95 75 85 47 70 60 +400 332 1 93 74 75 46 68 57 +380 333 1 94 75 82 52 82 71 +381 333 1 94 71 75 45 61 47 +382 333 1 89 69 71 39 56 46 +383 333 1 91 71 74 42 61 52 +384 333 1 87 65 67 40 49 42 +385 333 1 86 63 64 37 43 33 +386 333 1 89 66 67 37 53 47 +387 333 1 89 69 70 35 57 53 +388 333 1 94 81 86 57 91 66 +389 333 1 118 105 115 72 110 98 +390 333 1 108 96 102 74 103 86 +391 333 1 88 75 78 65 87 66 +392 333 1 93 80 85 62 86 68 +393 333 1 92 78 82 58 89 66 +394 333 1 97 77 83 50 68 56 +395 333 1 97 75 77 39 57 52 +396 333 1 96 74 80 41 64 53 +397 333 1 98 75 81 42 66 54 +398 333 1 97 77 82 41 62 51 +399 333 1 93 73 75 38 59 49 +400 333 1 93 73 75 38 59 49 +396 334 1 101 82 86 47 59 46 +397 334 1 92 72 76 40 58 50 +398 334 1 95 72 76 40 62 52 +399 334 1 93 72 78 40 60 48 +400 334 1 92 74 78 41 61 52 +185 346 4 80 69 69 85 110 70 +186 346 4 71 59 52 77 79 42 +185 347 4 85 71 73 76 86 58 +186 347 4 74 61 55 79 74 42 +187 347 4 75 67 61 80 95 56 +188 347 4 78 68 66 66 109 72 +184 348 4 80 70 69 71 92 59 +185 348 4 91 79 81 78 93 67 +186 348 4 76 62 57 84 77 43 +187 348 4 76 66 60 86 94 55 +188 348 4 84 72 69 81 103 71 +189 348 4 81 68 63 85 92 64 +184 349 4 74 60 
55 70 81 49 +185 349 4 87 76 72 80 93 61 +186 349 4 74 63 58 81 80 45 +187 349 4 77 64 62 81 95 55 +188 349 4 81 68 64 94 102 62 +189 349 4 73 60 52 97 82 48 +184 350 4 82 71 72 80 104 64 +185 350 4 71 59 55 72 82 47 +186 350 4 75 58 55 74 87 52 +187 350 4 81 69 68 85 112 64 +188 350 4 77 63 59 83 90 48 +189 350 4 73 56 50 75 77 46 +184 351 4 80 73 73 85 101 65 +185 351 4 78 65 64 78 77 45 +186 351 4 72 60 51 79 75 43 +187 351 4 81 70 68 83 113 67 +188 351 4 79 67 67 81 107 62 +189 351 4 71 56 49 76 74 39 +184 352 4 86 82 81 92 116 77 +185 352 4 79 63 60 75 78 50 +186 352 4 75 57 53 74 70 37 +187 352 4 79 68 68 79 96 61 +188 352 4 83 71 74 81 112 72 +189 352 4 75 61 61 79 101 55 +183 353 4 84 73 70 80 103 66 +184 353 4 87 79 83 81 116 80 +185 353 4 85 71 73 78 94 64 +186 353 4 85 68 70 77 84 54 +187 353 4 89 79 79 77 88 63 +188 353 4 85 76 78 79 103 71 +189 353 4 80 71 71 84 111 68 +183 354 4 74 64 58 82 95 54 +184 354 4 82 68 69 85 100 65 +185 354 4 80 65 66 70 97 64 +186 354 4 83 70 70 68 97 68 +187 354 4 88 75 74 70 84 59 +188 354 4 82 69 63 80 81 48 +189 354 4 81 70 71 85 105 66 +183 355 4 75 63 58 77 83 48 +184 355 4 84 71 71 89 98 60 +185 355 4 73 59 57 76 85 49 +186 355 4 74 58 55 66 75 44 +187 355 4 75 58 53 70 64 38 +188 355 4 69 54 44 85 68 34 +371 358 7 101 95 111 74 118 91 +372 358 7 118 105 119 69 137 124 +373 358 7 110 105 118 70 134 120 +371 359 7 102 97 111 75 118 92 +372 359 7 106 97 108 70 120 100 +373 359 7 110 105 118 70 134 120 +374 359 7 112 107 122 73 136 120 +371 360 7 108 100 114 62 119 107 +372 360 7 104 88 94 54 95 81 +373 360 7 109 101 111 69 108 92 +372 361 7 102 85 91 53 94 86 +373 361 7 110 95 104 63 97 86 +372 362 7 105 93 103 56 102 94 +373 362 7 111 93 95 51 87 78 +295 370 5 69 56 56 58 95 56 +296 370 5 72 55 57 59 105 66 +297 370 5 74 58 61 61 113 69 +298 370 5 75 57 63 62 122 76 +299 370 5 73 57 60 59 114 67 +300 370 5 75 56 56 54 94 58 +301 370 5 72 52 54 52 85 55 +302 370 5 72 52 50 51 84 51 +303 370 5 71 53 53 56 89 55 +304 
370 5 71 55 55 57 94 57 +305 370 5 70 52 52 54 84 50 +306 370 5 70 53 48 61 72 43 +295 371 5 73 56 56 58 85 51 +296 371 5 68 51 50 57 80 48 +297 371 5 75 58 61 61 110 69 +298 371 5 75 59 61 59 114 72 +299 371 5 74 59 61 58 112 66 +300 371 5 72 55 54 55 102 62 +301 371 5 71 53 51 53 86 54 +302 371 5 71 52 50 52 74 46 +303 371 5 70 52 46 51 78 46 +304 371 5 65 49 44 56 71 41 +305 371 5 66 49 42 61 59 33 +306 371 5 70 52 48 62 65 38 +295 372 5 66 52 46 56 69 41 +296 372 5 67 49 44 60 62 33 +297 372 5 70 54 49 60 79 45 +298 372 5 70 52 52 58 85 51 +299 372 5 73 57 59 56 101 62 +300 372 5 73 56 58 54 98 59 +301 372 5 68 52 50 57 75 43 +302 372 5 69 52 49 59 69 41 +303 372 5 69 50 44 54 65 37 +304 372 5 67 49 42 61 57 29 +305 372 5 68 50 44 63 62 35 +306 372 5 71 54 52 61 84 50 +399 372 5 65 49 42 60 58 30 +400 372 5 69 51 42 63 57 32 +401 372 5 68 50 40 66 59 34 +402 372 5 70 51 42 69 68 34 +403 372 5 74 54 48 66 74 44 +404 372 5 78 59 63 65 105 62 +405 372 5 81 66 71 65 111 71 +406 372 5 71 52 48 53 70 45 +407 372 5 67 51 43 66 75 43 +408 372 5 69 56 48 67 75 41 +409 372 5 71 56 47 60 61 37 +295 373 5 68 50 41 62 60 29 +296 373 5 68 50 41 62 60 29 +297 373 5 71 52 46 62 63 36 +298 373 5 69 48 41 59 58 30 +299 373 5 67 54 53 60 83 51 +300 373 5 70 53 54 56 86 52 +301 373 5 72 52 49 58 75 43 +302 373 5 72 52 50 59 79 49 +303 373 5 69 52 47 57 63 37 +304 373 5 68 52 46 60 72 39 +305 373 5 72 55 56 59 96 59 +306 373 5 73 58 60 57 100 64 +399 373 5 68 49 42 62 56 30 +400 373 5 70 51 46 66 57 30 +401 373 5 69 51 42 63 54 31 +402 373 5 67 49 39 65 59 29 +403 373 5 73 55 51 65 68 39 +404 373 5 78 60 65 68 106 63 +405 373 5 78 66 71 65 125 78 +406 373 5 66 49 45 50 62 38 +407 373 5 67 50 46 62 62 37 +408 373 5 70 55 46 73 72 40 +409 373 5 71 57 48 78 73 39 +295 374 5 67 51 45 62 64 31 +296 374 5 67 51 45 62 66 36 +297 374 5 68 49 45 62 59 30 +298 374 5 69 49 46 60 63 36 +299 374 5 71 54 49 57 74 45 +300 374 5 72 57 57 61 87 52 +301 374 5 72 55 56 60 93 56 +302 374 5 71 50 46 58 
73 43 +303 374 5 67 52 44 60 66 36 +304 374 5 68 53 51 57 81 49 +305 374 5 69 51 49 57 72 45 +306 374 5 73 53 50 59 81 48 +399 374 5 68 50 41 70 59 29 +400 374 5 72 53 46 66 63 38 +401 374 5 69 50 41 57 58 33 +402 374 5 70 51 44 73 69 35 +403 374 5 75 59 57 71 86 53 +404 374 5 78 61 64 63 97 58 +405 374 5 74 57 61 59 113 69 +406 374 5 68 50 43 53 62 35 +407 374 5 67 50 46 64 55 32 +408 374 5 73 61 51 82 87 49 +295 375 5 68 50 45 60 65 36 +296 375 5 67 50 44 61 66 38 +297 375 5 70 49 45 61 69 38 +298 375 5 68 50 44 60 60 34 +299 375 5 71 51 43 58 66 38 +300 375 5 72 57 55 62 92 56 +301 375 5 70 57 59 60 100 60 +302 375 5 68 50 48 55 77 44 +303 375 5 69 50 43 60 60 33 +304 375 5 67 52 45 59 59 33 +305 375 5 68 50 45 60 54 31 +306 375 5 71 50 51 60 75 44 +399 375 5 69 50 42 67 61 33 +400 375 5 72 54 49 60 58 35 +401 375 5 70 51 43 51 48 31 +402 375 5 72 51 45 66 69 38 +403 375 5 74 58 55 62 84 51 +404 375 5 72 56 53 51 76 50 +405 375 5 69 49 43 56 74 41 +406 375 5 70 50 44 57 59 36 +407 375 5 69 53 43 66 65 36 +408 375 5 72 59 51 72 80 43 +295 376 5 68 50 46 62 64 38 +296 376 5 67 50 44 61 66 38 +297 376 5 68 51 46 61 69 39 +298 376 5 67 50 46 58 68 39 +299 376 5 69 49 43 59 60 31 +300 376 5 69 53 50 63 78 47 +301 376 5 73 54 57 58 90 58 +302 376 5 69 50 48 54 70 39 +303 376 5 69 51 45 57 65 39 +304 376 5 69 51 44 57 65 39 +305 376 5 67 48 41 59 56 33 +306 376 5 70 51 49 61 71 44 +399 376 5 68 51 38 64 57 32 +400 376 5 68 52 43 61 60 32 +401 376 5 67 52 46 61 58 35 +402 376 5 69 54 46 65 64 37 +403 376 5 71 53 50 62 78 45 +404 376 5 70 54 48 49 56 38 +405 376 5 68 48 40 59 55 31 +406 376 5 70 51 45 57 54 29 +407 376 5 68 54 44 67 71 38 +408 376 5 71 59 52 73 86 46 +295 377 5 68 50 43 62 64 34 +296 377 5 70 50 44 61 65 37 +297 377 5 67 51 42 62 65 37 +298 377 5 64 51 43 60 64 35 +299 377 5 68 51 45 64 65 37 +300 377 5 69 54 54 66 83 49 +301 377 5 74 54 56 58 90 54 +302 377 5 71 51 45 56 66 39 +303 377 5 66 50 42 60 59 33 +304 377 5 70 50 44 59 63 37 +305 377 5 67 48 41 
61 56 32 +306 377 5 71 51 47 64 67 40 +400 377 5 69 52 45 66 60 32 +401 377 5 69 54 48 65 72 41 +402 377 5 69 53 49 65 74 43 +403 377 5 71 53 50 62 78 45 +404 377 5 69 50 48 57 60 34 +405 377 5 69 53 44 62 62 35 +406 377 5 72 54 46 61 63 36 +407 377 5 68 58 50 69 72 40 +408 377 5 74 60 52 73 85 49 +295 378 5 67 50 42 62 61 30 +296 378 5 69 50 44 61 63 34 +297 378 5 67 50 40 62 61 35 +298 378 5 66 51 43 64 62 32 +299 378 5 69 53 51 64 78 47 +300 378 5 73 56 61 64 108 67 +301 378 5 73 55 58 58 102 62 +302 378 5 69 52 46 56 76 44 +303 378 5 65 49 40 62 55 29 +304 378 5 70 50 42 60 61 35 +305 378 5 69 49 43 62 60 35 +306 378 5 69 50 45 62 67 37 +400 378 5 74 59 53 69 71 44 +401 378 5 72 55 47 61 71 45 +402 378 5 69 51 45 56 61 36 +403 378 5 72 54 46 62 65 38 +404 378 5 76 58 53 64 67 41 +405 378 5 79 68 62 64 78 52 +406 378 5 72 59 54 69 78 49 +407 378 5 71 58 49 72 74 41 +408 378 5 76 61 57 63 80 47 +295 379 5 68 49 43 64 62 31 +296 379 5 69 50 42 64 64 37 +297 379 5 67 49 46 60 65 36 +298 379 5 69 52 48 62 81 46 +299 379 5 71 55 57 62 91 55 +300 379 5 75 55 62 58 99 61 +301 379 5 72 55 54 55 89 52 +302 379 5 69 52 46 56 76 44 +303 379 5 67 49 42 60 61 32 +304 379 5 67 47 40 62 54 32 +305 379 5 68 51 44 64 58 34 +306 379 5 70 51 47 66 71 38 +400 379 5 68 56 43 66 70 40 +401 379 5 68 52 44 71 68 39 +402 379 5 70 52 43 65 62 34 +403 379 5 68 51 46 58 57 34 +404 379 5 72 54 44 61 59 35 +405 379 5 72 57 50 63 76 48 +406 379 5 75 58 54 67 83 53 +407 379 5 76 61 55 74 83 46 +408 379 5 76 59 57 68 82 49 +295 380 5 68 52 47 58 76 42 +296 380 5 69 50 51 59 79 43 +297 380 5 68 53 52 58 85 51 +298 380 5 71 56 56 56 95 58 +299 380 5 73 53 55 56 91 54 +300 380 5 70 52 51 56 77 44 +301 380 5 68 51 46 56 63 36 +302 380 5 69 50 43 59 64 36 +303 380 5 66 49 41 63 55 28 +304 380 5 66 49 41 63 55 28 +305 380 5 67 51 44 67 64 38 +306 380 5 70 55 51 65 90 50 +400 380 5 69 51 43 61 65 34 +401 380 5 66 49 39 65 63 32 +402 380 5 66 51 41 73 73 38 +403 380 5 69 53 45 64 60 37 +404 380 5 70 52 
50 58 64 36 +405 380 5 74 59 51 61 70 39 +406 380 5 77 64 63 74 89 55 +407 380 5 79 69 64 90 106 60 +408 380 5 81 68 65 84 105 68 +295 381 5 71 51 46 57 65 38 +296 381 5 70 52 49 57 78 45 +297 381 5 67 52 49 56 82 50 +298 381 5 68 53 51 56 80 47 +299 381 5 70 52 49 57 75 42 +300 381 5 68 49 45 57 68 40 +301 381 5 67 48 42 57 60 34 +302 381 5 66 49 42 60 55 30 +303 381 5 66 51 41 65 57 29 +304 381 5 69 53 47 67 75 44 +305 381 5 73 57 56 61 96 59 +306 381 5 74 55 61 55 94 57 +400 381 5 70 50 42 58 66 38 +401 381 5 67 49 41 59 58 31 +402 381 5 67 51 44 68 72 37 +403 381 5 71 56 49 76 82 47 +404 381 5 75 59 54 69 89 52 +405 381 5 73 56 52 59 78 46 +406 381 5 70 56 52 63 75 46 +407 381 5 73 57 53 77 87 47 +408 381 5 80 64 60 80 85 55 +116 382 5 69 51 48 60 72 40 +117 382 5 69 51 42 58 59 35 +118 382 5 69 52 42 63 58 31 +119 382 5 67 50 45 65 58 29 +120 382 5 67 49 44 59 59 31 +121 382 5 67 51 45 60 59 32 +122 382 5 70 52 45 58 63 38 +123 382 5 69 50 43 57 59 31 +124 382 5 70 51 46 62 69 39 +125 382 5 67 51 41 59 75 45 +126 382 5 70 54 47 54 64 39 +127 382 5 70 54 48 50 70 43 +400 382 5 69 52 45 56 72 40 +401 382 5 69 52 45 59 78 42 +402 382 5 72 54 48 66 80 43 +403 382 5 71 55 47 71 90 48 +404 382 5 73 58 52 67 92 53 +405 382 5 72 56 53 57 84 51 +406 382 5 70 54 46 50 56 34 +407 382 5 70 51 45 64 55 32 +408 382 5 70 53 46 65 50 29 +116 383 5 68 50 45 57 65 36 +117 383 5 70 52 45 57 62 37 +118 383 5 68 53 42 60 58 34 +119 383 5 67 52 44 64 62 33 +120 383 5 69 51 44 62 53 28 +121 383 5 68 49 43 62 56 29 +122 383 5 69 54 45 63 65 36 +123 383 5 69 52 44 61 59 33 +124 383 5 70 51 44 64 63 35 +125 383 5 69 50 45 57 72 41 +126 383 5 70 54 48 53 73 43 +127 383 5 72 58 55 59 71 42 +400 383 5 70 53 45 60 70 37 +401 383 5 69 52 48 57 78 44 +402 383 5 70 56 48 67 82 48 +403 383 5 71 55 49 69 84 48 +404 383 5 72 55 51 64 81 48 +405 383 5 74 56 50 58 79 47 +406 383 5 69 55 46 58 66 38 +407 383 5 65 52 43 60 67 41 +408 383 5 72 52 46 61 58 34 +116 384 5 67 52 44 62 58 31 +117 384 5 69 
52 41 58 63 35 +118 384 5 70 52 43 61 60 36 +119 384 5 67 52 44 64 62 33 +120 384 5 67 50 45 62 60 34 +121 384 5 68 51 43 63 56 31 +122 384 5 70 52 43 60 65 37 +123 384 5 70 51 47 55 67 39 +124 384 5 68 50 45 57 60 33 +125 384 5 70 53 44 61 69 38 +126 384 5 72 56 47 60 78 46 +127 384 5 73 59 52 70 95 54 +400 384 5 71 54 44 60 73 40 +401 384 5 69 56 48 66 79 46 +402 384 5 70 55 50 60 75 42 +403 384 5 69 53 45 63 77 45 +404 384 5 70 53 43 60 68 41 +405 384 5 70 54 49 64 77 44 +406 384 5 71 58 52 62 73 48 +407 384 5 71 58 52 62 73 48 +408 384 5 76 60 56 63 70 42 +116 385 5 69 50 41 57 59 33 +117 385 5 69 50 43 58 60 32 +118 385 5 71 52 43 62 60 31 +119 385 5 68 50 43 64 61 33 +120 385 5 68 51 45 60 63 35 +121 385 5 67 50 41 59 57 31 +122 385 5 67 51 43 63 56 31 +123 385 5 67 51 45 58 65 37 +124 385 5 67 49 43 55 55 30 +125 385 5 69 52 48 63 66 38 +126 385 5 71 56 47 69 83 44 +127 385 5 68 53 46 63 83 48 +228 385 4 82 64 66 80 71 44 +229 385 4 72 60 56 83 63 37 +116 386 5 68 49 45 56 57 33 +117 386 5 68 49 45 56 57 33 +118 386 5 68 50 43 61 56 30 +119 386 5 67 52 45 64 60 33 +120 386 5 67 50 44 61 59 30 +121 386 5 68 52 45 60 60 33 +122 386 5 69 52 46 62 63 34 +123 386 5 66 51 45 63 64 37 +124 386 5 67 49 44 56 67 36 +125 386 5 72 54 47 70 78 43 +126 386 5 71 57 48 69 88 46 +127 386 5 70 51 43 55 65 38 +228 386 4 82 67 74 76 96 64 +229 386 4 78 68 67 85 112 68 +230 386 4 80 66 66 87 94 57 +231 386 4 76 61 60 86 82 48 +116 387 5 70 50 44 56 63 37 +117 387 5 70 52 42 61 62 36 +118 387 5 72 52 47 59 68 39 +119 387 5 68 50 45 61 59 33 +120 387 5 67 50 42 60 57 30 +121 387 5 68 52 47 65 67 37 +122 387 5 68 54 46 62 77 45 +123 387 5 70 55 48 56 71 42 +124 387 5 73 54 51 64 86 47 +125 387 5 72 54 47 70 78 43 +126 387 5 69 52 46 67 78 45 +127 387 5 67 51 46 55 72 43 +228 387 4 71 53 54 64 89 55 +229 387 4 78 63 60 70 94 58 +230 387 4 84 71 72 78 98 65 +231 387 4 85 73 77 76 87 62 +116 388 5 71 51 44 58 66 38 +117 388 5 71 55 47 63 74 44 +118 388 5 71 55 51 56 79 47 +119 388 5 
68 50 45 59 62 34 +120 388 5 70 53 48 66 71 40 +121 388 5 70 53 48 57 76 44 +122 388 5 69 52 48 58 72 43 +123 388 5 71 55 50 68 80 45 +124 388 5 69 49 44 53 66 37 +125 388 5 68 51 45 60 69 41 +126 388 5 69 55 47 59 73 44 +127 388 5 70 58 47 62 80 44 +228 388 4 68 51 47 65 66 39 +229 388 4 78 61 57 74 78 43 +230 388 4 87 75 76 74 96 65 +231 388 4 89 79 82 85 100 68 +232 388 4 75 60 56 79 79 45 +116 389 5 68 51 47 58 72 41 +117 389 5 70 54 50 62 80 46 +118 389 5 70 52 49 53 76 44 +119 389 5 71 53 50 57 77 45 +120 389 5 72 54 49 59 77 46 +121 389 5 67 51 46 54 68 40 +122 389 5 69 54 49 67 80 44 +123 389 5 69 51 45 56 69 38 +124 389 5 67 46 41 50 52 30 +125 389 5 67 51 45 52 64 37 +126 389 5 69 55 45 60 70 41 +127 389 5 70 55 47 58 78 45 +228 389 4 73 57 57 70 81 47 +229 389 4 79 63 64 74 103 58 +230 389 4 81 71 71 77 102 63 +231 389 4 81 71 67 86 102 68 +232 389 4 84 72 71 85 86 53 +116 390 5 71 53 48 58 71 38 +117 390 5 70 53 48 56 77 46 +118 390 5 71 51 47 53 72 42 +119 390 5 69 54 50 54 78 47 +120 390 5 72 53 48 53 76 46 +121 390 5 69 55 50 62 78 46 +122 390 5 71 57 54 65 87 49 +123 390 5 69 49 44 44 67 37 +124 390 5 67 48 42 53 53 30 +125 390 5 67 49 44 56 57 33 +126 390 5 70 52 45 56 66 38 +127 390 5 70 53 45 56 73 43 +228 390 4 79 68 68 75 98 61 +229 390 4 83 72 72 76 106 68 +230 390 4 84 71 69 85 103 59 +231 390 4 81 71 67 91 104 66 +232 390 4 79 69 62 92 97 61 +116 391 5 72 54 47 57 78 46 +117 391 5 68 53 48 57 72 41 +118 391 5 71 50 46 57 67 39 +119 391 5 66 50 44 54 63 38 +120 391 5 69 51 44 60 64 39 +121 391 5 75 57 55 66 89 52 +122 391 5 71 53 52 51 81 48 +123 391 5 68 49 44 46 57 34 +124 391 5 69 51 45 55 58 35 +125 391 5 70 49 45 65 58 32 +126 391 5 69 50 43 55 61 34 +127 391 5 68 52 44 54 62 36 +228 391 4 78 66 64 82 98 64 +229 391 4 83 70 69 83 101 67 +230 391 4 83 71 67 89 102 62 +231 391 4 81 70 66 96 106 61 +232 391 4 80 69 64 91 102 64 +116 392 5 71 54 50 54 81 48 +117 392 5 70 54 48 59 68 39 +118 392 5 71 50 47 62 63 35 +119 392 5 67 50 40 58 54 
29 +120 392 5 69 53 47 70 70 39 +121 392 5 72 54 53 55 80 48 +122 392 5 68 48 42 45 59 36 +123 392 5 69 48 41 54 55 32 +124 392 5 69 51 44 56 61 37 +125 392 5 70 51 43 60 58 34 +126 392 5 66 49 41 59 57 33 +127 392 5 66 49 43 56 54 30 +228 392 4 90 75 78 83 109 71 +229 392 4 79 68 62 93 96 61 +230 392 4 72 61 52 104 89 48 +231 392 4 75 59 52 92 82 42 +232 392 4 80 67 64 90 93 57 +117 393 5 71 53 55 64 81 47 +118 393 5 72 56 54 58 93 52 +119 393 5 68 50 42 65 61 33 +120 393 5 71 56 55 66 86 49 +121 393 5 68 50 48 45 67 43 +122 393 5 65 46 41 54 45 27 +123 393 5 68 49 43 62 59 30 +124 393 5 66 50 43 58 57 31 +125 393 5 68 50 40 55 55 30 +126 393 5 67 49 42 62 56 28 +127 393 5 66 49 42 63 58 33 +228 393 4 88 74 74 88 99 65 +229 393 4 75 62 58 89 80 52 +230 393 4 71 57 46 101 76 40 +231 393 4 70 54 41 104 82 41 +232 393 4 75 60 51 87 77 46 +267 393 4 76 61 57 63 75 50 +268 393 4 77 60 57 67 98 61 +117 394 5 75 57 61 62 109 65 +118 394 5 67 53 49 49 74 44 +119 394 5 70 52 50 51 76 48 +120 394 5 68 45 40 49 49 29 +121 394 5 68 49 42 62 55 28 +122 394 5 69 52 47 65 64 34 +123 394 5 67 50 46 61 55 29 +124 394 5 70 50 42 60 53 30 +125 394 5 69 50 41 60 52 30 +126 394 5 69 49 41 65 55 29 +127 394 5 66 50 45 63 60 32 +267 394 4 73 63 60 79 89 53 +268 394 4 81 69 66 84 107 65 +269 394 4 75 64 57 86 101 60 +270 394 4 78 64 57 90 96 52 +271 394 4 85 75 72 99 100 60 +272 394 4 84 76 72 96 99 60 +117 395 5 67 53 49 49 74 44 +118 395 5 69 48 42 56 50 29 +119 395 5 67 48 42 57 48 26 +120 395 5 69 52 46 67 67 35 +121 395 5 70 52 49 62 70 43 +122 395 5 71 52 48 57 66 40 +123 395 5 71 53 47 59 62 34 +124 395 5 70 51 43 64 59 32 +125 395 5 69 53 47 63 64 35 +126 395 5 69 52 45 58 62 36 +127 395 5 67 50 43 59 57 33 +267 395 4 80 67 66 86 91 55 +268 395 4 87 77 77 85 109 72 +269 395 4 82 70 67 89 100 59 +270 395 4 75 62 55 103 82 43 +271 395 4 70 59 48 109 89 45 +272 395 4 80 71 72 89 102 64 +273 395 4 90 80 91 72 104 76 +267 396 4 89 80 80 91 116 77 +268 396 4 89 80 80 91 116 77 +269 396 
4 92 83 84 90 119 80 +270 396 4 86 73 69 96 94 57 +271 396 4 76 61 54 92 75 36 +272 396 4 79 65 67 75 97 60 +273 396 4 88 76 82 68 114 83 +267 397 4 85 75 76 87 108 70 +268 397 4 89 79 79 83 94 66 +269 397 4 101 87 89 68 97 73 +270 397 4 84 70 65 84 88 58 +271 397 4 73 59 50 92 83 47 +272 397 4 87 75 75 78 106 74 +273 397 4 80 67 66 80 93 60 +267 398 4 83 71 72 84 106 65 +268 398 4 85 74 72 85 102 64 +269 398 4 84 70 65 84 86 62 +270 398 4 72 59 51 90 84 54 +271 398 4 73 61 54 94 76 40 +272 398 4 86 71 68 79 92 58 +273 398 4 83 69 65 67 94 60 +186 399 6 76 64 69 76 94 54 +187 399 6 81 69 72 69 104 66 +188 399 6 76 62 63 68 86 51 +267 399 4 82 71 69 79 102 64 +268 399 4 85 74 72 85 102 64 +269 399 4 83 71 68 90 89 52 +270 399 4 76 61 54 94 79 42 +271 399 4 72 56 46 91 78 43 +272 399 4 75 60 52 84 78 47 +273 399 4 76 62 59 77 86 52 +185 400 6 74 59 62 60 76 46 +186 400 6 76 64 69 76 94 54 +187 400 6 72 57 56 49 75 41 +188 400 6 70 51 46 35 33 18 +267 400 4 81 70 68 81 89 56 +268 400 4 77 68 64 93 109 61 +269 400 4 85 71 70 94 95 61 +270 400 4 79 65 58 95 85 52 +271 400 4 86 75 73 90 93 61 +272 400 4 82 73 70 88 77 53 +273 400 4 73 58 53 86 71 38 +185 401 6 76 61 60 69 91 49 +186 401 6 68 48 40 25 24 16 +187 401 6 66 45 38 17 10 8 +188 401 6 66 46 33 13 10 10 +269 401 4 81 67 65 92 88 57 +270 401 4 86 72 70 85 89 67 +271 401 4 84 73 76 101 92 60 +272 401 4 84 69 70 84 81 55 +273 401 4 72 55 48 80 64 36 +274 401 4 79 64 58 85 80 50 +186 402 6 69 48 42 26 29 19 +271 402 4 75 61 55 100 87 46 +272 402 4 69 54 46 88 66 35 +273 402 4 70 54 41 86 64 34 +274 402 4 79 64 58 85 80 50 274 403 4 79 67 64 103 103 56 \ No newline at end of file diff --git a/pyspatialml/datasets/meuse.py b/pyspatialml/datasets/meuse.py index b88d900..5efe2d0 100644 --- a/pyspatialml/datasets/meuse.py +++ b/pyspatialml/datasets/meuse.py @@ -1,29 +1,29 @@ -import os - -chnl_dist = os.path.join(os.path.dirname(__file__), 'chnl_dist.tif') -dem = os.path.join(os.path.dirname(__file__), 'dem.tif') -dist = 
os.path.join(os.path.dirname(__file__), 'dist.tif') -ffreq = os.path.join(os.path.dirname(__file__), 'ffreq.tif') -landimg2 = os.path.join(os.path.dirname(__file__), 'landimg2.tif') -landimg3 = os.path.join(os.path.dirname(__file__), 'landimg3.tif') -landimg4 = os.path.join(os.path.dirname(__file__), 'landimg4.tif') -mrvbf = os.path.join(os.path.dirname(__file__), 'mrvbf.tif') -rsp = os.path.join(os.path.dirname(__file__), 'rsp.tif') -slope = os.path.join(os.path.dirname(__file__), 'slope.tif') -soil = os.path.join(os.path.dirname(__file__), 'soil.tif') -twi = os.path.join(os.path.dirname(__file__), 'twi.tif') -meuse = os.path.join(os.path.dirname(__file__), 'meuse.shp') - -predictors = [ - chnl_dist, - dem, - dist, - ffreq, - landimg2, - landimg3, - landimg4, - mrvbf, - rsp, - slope, - soil, - twi] +import os + +chnl_dist = os.path.join(os.path.dirname(__file__), 'chnl_dist.tif') +dem = os.path.join(os.path.dirname(__file__), 'dem.tif') +dist = os.path.join(os.path.dirname(__file__), 'dist.tif') +ffreq = os.path.join(os.path.dirname(__file__), 'ffreq.tif') +landimg2 = os.path.join(os.path.dirname(__file__), 'landimg2.tif') +landimg3 = os.path.join(os.path.dirname(__file__), 'landimg3.tif') +landimg4 = os.path.join(os.path.dirname(__file__), 'landimg4.tif') +mrvbf = os.path.join(os.path.dirname(__file__), 'mrvbf.tif') +rsp = os.path.join(os.path.dirname(__file__), 'rsp.tif') +slope = os.path.join(os.path.dirname(__file__), 'slope.tif') +soil = os.path.join(os.path.dirname(__file__), 'soil.tif') +twi = os.path.join(os.path.dirname(__file__), 'twi.tif') +meuse = os.path.join(os.path.dirname(__file__), 'meuse.shp') + +predictors = [ + chnl_dist, + dem, + dist, + ffreq, + landimg2, + landimg3, + landimg4, + mrvbf, + rsp, + slope, + soil, + twi] diff --git a/pyspatialml/datasets/nc.py b/pyspatialml/datasets/nc.py index 008413e..50f6d17 100644 --- a/pyspatialml/datasets/nc.py +++ b/pyspatialml/datasets/nc.py @@ -1,14 +1,14 @@ -import os - -band1 = 
os.path.join(os.path.dirname(__file__), 'lsat7_2000_10.tif') -band2 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_20.tif') -band3 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_30.tif') -band4 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_40.tif') -band5 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_50.tif') -band7 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_70.tif') -strata = os.path.join(os.path.dirname(__file__), 'strata.tif') -multiband = os.path.join(os.path.dirname(__file__), 'landsat_multiband.tif') -labelled_pixels = os.path.join(os.path.dirname(__file__), 'landsat96_labelled_pixels.tif') -points = os.path.join(os.path.dirname(__file__), 'landsat96_points.shp') -polygons = os.path.join(os.path.dirname(__file__), 'landsat96_polygons.shp') -extracted_pixels = os.path.join(os.path.dirname(__file__), 'extracted_pixels.txt') +import os + +band1 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_10.tif') +band2 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_20.tif') +band3 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_30.tif') +band4 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_40.tif') +band5 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_50.tif') +band7 = os.path.join(os.path.dirname(__file__), 'lsat7_2000_70.tif') +strata = os.path.join(os.path.dirname(__file__), 'strata.tif') +multiband = os.path.join(os.path.dirname(__file__), 'landsat_multiband.tif') +labelled_pixels = os.path.join(os.path.dirname(__file__), 'landsat96_labelled_pixels.tif') +points = os.path.join(os.path.dirname(__file__), 'landsat96_points.shp') +polygons = os.path.join(os.path.dirname(__file__), 'landsat96_polygons.shp') +extracted_pixels = os.path.join(os.path.dirname(__file__), 'extracted_pixels.txt') diff --git a/pyspatialml/locindexer.py b/pyspatialml/locindexer.py index 20a6d60..72552dc 100644 --- a/pyspatialml/locindexer.py +++ b/pyspatialml/locindexer.py @@ -1,239 +1,239 @@ -import pandas as pd -from 
collections.abc import MutableMapping -from . import raster, rasterlayer - - -class _LocIndexer(MutableMapping): - """Access pyspatialml.RasterLayer objects by using a key. - - Represents a structure similar to a dict but allows access using a - list of keys (not just a single key). - """ - - def __init__(self, *args, **kw): - self.__dict__.update(*args, **kw) - - def __getitem__(self, key): - """Defines the subset method for the _LocIndexer. Allows the - contained RasterLayer objects to be subset using a either - single, or multiple labels corresponding to the names of each - RasterLayer. - - Parameters - ---------- - key : a single str, or a list of str - - Returns - ------- - Returns a RasterLayer if only a single item is subset, or a - Raster if multiple items are subset. - - """ - if isinstance(key, str): - new = self.__dict__[key] - else: - selected = [] - for i in key: - if i in self.names is False: - raise KeyError("key not present in Raster object") - else: - selected.append(self.__dict__[i]) - new = raster.Raster(selected) - return new - - def __setitem__(self, key, value): - """Allows a RasterLayer object to be assigned to a name within - a Raster object. This automatically updates the indexer with - the layer, and adds the RasterLayer's name as an attribute in - the Raster. - - Parameters - ---------- - key : str - The key to use for the assignment: - - value : pyspatialml.RasterLayer - A single RasterLayer object to assign to the key. 
- """ - if isinstance(value, rasterlayer.RasterLayer): - self.__dict__[key] = value - else: - raise ValueError("value is not a RasterLayer object") - - def __iter__(self): - """Iterates through keys""" - return iter(self._keys) - - def __len__(self): - """Number of layers in the indexer""" - return len(self.__dict__) - len(self._internal) - - def __delitem__(self, key): - """Delete a key:value pair""" - self.__dict__.pop(key) - - def __repr__(self): - print("Raster Object Containing {n} Layers".format(n=self.count)) - meta = pd.DataFrame( - { - "attribute": ["names", "files", "rows", "cols", "res", "nodatavals"], - "values": [ - list(self.names), - self.files, - self.shape[0], - self.shape[1], - self.res, - self.nodatavals, - ], - } - ) - print(meta) - - return "" - - @property - def _keys(self): - d = {k: v for (k, v) in self.__dict__.items() if k not in self._internal} - return d.keys() - - def _rename_inplace(self, old, new): - """Rename a RasterLayer from `old` to `new. This method renames - the layer in the indexer and renames the equivalent attribute - in the parent Raster object. - - Parameters - ---------- - old : str - Name of the existing key. - - new : str - Name to use to rename the existing key. 
- """ - # rename the index by rebuilding the dict - original_keys = list(self.__dict__.keys()) - new_keys = [new if i == old else i for i in original_keys] - new_dict = dict(zip(new_keys, self.__dict__.values())) - self.__dict__ = new_dict - - # update the internal name of a RasterLayer - self.__dict__[new].name = new - - @property - def loc(self): - """Alias for the getter method of the indexer""" - return self - - @loc.setter - def loc(self, key, value): - """Alias for the setter method if the indexer""" - self.__dict__[key] = value - - @property - def iloc(self): - """Reference to an integer-based indexer to access the layers - by integer position rather than label""" - return _iLocIndexer(self) - - @property - def names(self): - return self._keys - - @names.setter - def names(self, value): - if isinstance(value, str): - value = [value] - - if len(value) != self.count: - raise ValueError( - "Length of new names has to equal the number of layers in the Raster" - ) - - renamer = {old: new for (old, new) in zip(self.names, value)} - self.rename(renamer, in_place=True) - - -class _iLocIndexer(object): - """Access pyspatialml.RasterLayer objects using an index position - - A wrapper around _LocIndexer to enable integer-based indexing of - the items in the OrderedDict. Setting and getting items can occur - using a single index position, a list or tuple of positions, or a - slice of positions. - - Methods - ------- - __getitem__ : index - Subset RasterLayers using an integer index, a slice of indexes, - or a list/tuple of indexes. Returns a RasterLayer is a single - item is subset, or a Raster if multiple layers are subset. - - __setitem__ : index, value - Assign a RasterLayer to a index position within the indexer. - The index can be a single integer position, a slice of - positions, or a list/tuple of positions. This method also - updates the parent Raster object's attributes with the names - of the new RasterLayers that were passed as the value. 
- """ - - def __init__(self, loc_indexer): - """Initiate a _iLocIndexer - - Parameters - ---------- - loc_indexer : pyspatialml.raster._LocIndexer - An instance of a _LocIndexer. - """ - self._index = loc_indexer - - def __setitem__(self, index, value): - if isinstance(index, int): - key = list(self._index.keys())[index] - self._index[key] = value - - if isinstance(index, slice): - start = index.start - stop = index.stop - step = index.step - - if start is None: - start = 0 - if stop is None: - stop = self.count - if step is None: - step = 1 - - index = list(range(start, stop, step)) - - if isinstance(index, (list, tuple)): - for i, v in zip(index, value): - key = list(self._index.keys())[i] - self._index[key] = v - - def __getitem__(self, index): - if isinstance(index, int): - key = list(self._index.keys())[index] - selected = self._index[key] - - if isinstance(index, slice): - start = index.start - stop = index.stop - step = index.step - - if start is None: - start = 0 - - if stop is None: - stop = self.count - - if step is None: - step = 1 - - index = list(range(start, stop, step)) - - if isinstance(index, (list, tuple)): - key = [] - for i in index: - key.append(list(self._index.keys())[i]) - selected = raster.Raster([self._index[k] for k in key]) - - return selected +import pandas as pd +from collections.abc import MutableMapping +from . import raster, rasterlayer + + +class _LocIndexer(MutableMapping): + """Access pyspatialml.RasterLayer objects by using a key. + + Represents a structure similar to a dict but allows access using a + list of keys (not just a single key). + """ + + def __init__(self, *args, **kw): + self.__dict__.update(*args, **kw) + + def __getitem__(self, key): + """Defines the subset method for the _LocIndexer. Allows the + contained RasterLayer objects to be subset using a either + single, or multiple labels corresponding to the names of each + RasterLayer. 
+ + Parameters + ---------- + key : a single str, or a list of str + + Returns + ------- + Returns a RasterLayer if only a single item is subset, or a + Raster if multiple items are subset. + + """ + if isinstance(key, str): + new = self.__dict__[key] + else: + selected = [] + for i in key: + if i in self.names is False: + raise KeyError("key not present in Raster object") + else: + selected.append(self.__dict__[i]) + new = raster.Raster(selected) + return new + + def __setitem__(self, key, value): + """Allows a RasterLayer object to be assigned to a name within + a Raster object. This automatically updates the indexer with + the layer, and adds the RasterLayer's name as an attribute in + the Raster. + + Parameters + ---------- + key : str + The key to use for the assignment: + + value : pyspatialml.RasterLayer + A single RasterLayer object to assign to the key. + """ + if isinstance(value, rasterlayer.RasterLayer): + self.__dict__[key] = value + else: + raise ValueError("value is not a RasterLayer object") + + def __iter__(self): + """Iterates through keys""" + return iter(self._keys) + + def __len__(self): + """Number of layers in the indexer""" + return len(self.__dict__) - len(self._internal) + + def __delitem__(self, key): + """Delete a key:value pair""" + self.__dict__.pop(key) + + def __repr__(self): + print("Raster Object Containing {n} Layers".format(n=self.count)) + meta = pd.DataFrame( + { + "attribute": ["names", "files", "rows", "cols", "res", "nodatavals"], + "values": [ + list(self.names), + self.files, + self.shape[0], + self.shape[1], + self.res, + self.nodatavals, + ], + } + ) + print(meta) + + return "" + + @property + def _keys(self): + d = {k: v for (k, v) in self.__dict__.items() if k not in self._internal} + return d.keys() + + def _rename_inplace(self, old, new): + """Rename a RasterLayer from `old` to `new. This method renames + the layer in the indexer and renames the equivalent attribute + in the parent Raster object. 
+ + Parameters + ---------- + old : str + Name of the existing key. + + new : str + Name to use to rename the existing key. + """ + # rename the index by rebuilding the dict + original_keys = list(self.__dict__.keys()) + new_keys = [new if i == old else i for i in original_keys] + new_dict = dict(zip(new_keys, self.__dict__.values())) + self.__dict__ = new_dict + + # update the internal name of a RasterLayer + self.__dict__[new].name = new + + @property + def loc(self): + """Alias for the getter method of the indexer""" + return self + + @loc.setter + def loc(self, key, value): + """Alias for the setter method if the indexer""" + self.__dict__[key] = value + + @property + def iloc(self): + """Reference to an integer-based indexer to access the layers + by integer position rather than label""" + return _iLocIndexer(self) + + @property + def names(self): + return self._keys + + @names.setter + def names(self, value): + if isinstance(value, str): + value = [value] + + if len(value) != self.count: + raise ValueError( + "Length of new names has to equal the number of layers in the Raster" + ) + + renamer = {old: new for (old, new) in zip(self.names, value)} + self.rename(renamer, in_place=True) + + +class _iLocIndexer(object): + """Access pyspatialml.RasterLayer objects using an index position + + A wrapper around _LocIndexer to enable integer-based indexing of + the items in the OrderedDict. Setting and getting items can occur + using a single index position, a list or tuple of positions, or a + slice of positions. + + Methods + ------- + __getitem__ : index + Subset RasterLayers using an integer index, a slice of indexes, + or a list/tuple of indexes. Returns a RasterLayer is a single + item is subset, or a Raster if multiple layers are subset. + + __setitem__ : index, value + Assign a RasterLayer to a index position within the indexer. + The index can be a single integer position, a slice of + positions, or a list/tuple of positions. 
This method also + updates the parent Raster object's attributes with the names + of the new RasterLayers that were passed as the value. + """ + + def __init__(self, loc_indexer): + """Initiate a _iLocIndexer + + Parameters + ---------- + loc_indexer : pyspatialml.raster._LocIndexer + An instance of a _LocIndexer. + """ + self._index = loc_indexer + + def __setitem__(self, index, value): + if isinstance(index, int): + key = list(self._index.keys())[index] + self._index[key] = value + + if isinstance(index, slice): + start = index.start + stop = index.stop + step = index.step + + if start is None: + start = 0 + if stop is None: + stop = self.count + if step is None: + step = 1 + + index = list(range(start, stop, step)) + + if isinstance(index, (list, tuple)): + for i, v in zip(index, value): + key = list(self._index.keys())[i] + self._index[key] = v + + def __getitem__(self, index): + if isinstance(index, int): + key = list(self._index.keys())[index] + selected = self._index[key] + + if isinstance(index, slice): + start = index.start + stop = index.stop + step = index.step + + if start is None: + start = 0 + + if stop is None: + stop = self.count + + if step is None: + step = 1 + + index = list(range(start, stop, step)) + + if isinstance(index, (list, tuple)): + key = [] + for i in index: + key.append(list(self._index.keys())[i]) + selected = raster.Raster([self._index[k] for k in key]) + + return selected diff --git a/pyspatialml/preprocessing.py b/pyspatialml/preprocessing.py index 6177c0e..67610c1 100644 --- a/pyspatialml/preprocessing.py +++ b/pyspatialml/preprocessing.py @@ -1,293 +1,293 @@ -from copy import deepcopy - -import numpy as np -import rasterio -from scipy import ndimage - -from .raster import Raster - - -def one_hot_encode(layer, file_path, categories=None, driver="GTiff"): - """One-hot encoding of a RasterLayer. - - Parameters - ---------- - layer : pyspatialml.RasterLayer - Containing categories to perform one-hot encoding on. 
- - file_path : str - File path to save one-hot encoded raster. - - categories : list, ndarray, optional - Optional list of categories to extract. Default performs one-hot - encoding on all categorical values in the input layer. - - driver : str, options. Default is 'GTiff' - GDAL-compatible driver. - - Returns - ------- - pyspatialml.Raster - Each categorical value is encoded as a layer with a Raster object. - """ - arr = layer.read(masked=True) - - if categories is None: - categories = np.unique(arr) - categories = categories[~categories.mask] - categories = categories.data.astype("int32") - - arr_ohe = np.ma.zeros((len(categories), arr.shape[0], arr.shape[1]), dtype="int32") - names = [] - prefix = layer.names[0] - - for i, cat in enumerate(categories): - enc = deepcopy(arr) - enc[enc != cat] = 0 - enc[enc == cat] = 1 - arr_ohe[i, :, :] = enc - - names.append("_".join([prefix, "cat", str(cat)])) - - # create new stack - meta = deepcopy(layer.ds.meta) - meta["driver"] = driver - meta["nodata"] = -99999 - meta["count"] = arr_ohe.shape[0] - meta["dtype"] = "int32" - - with rasterio.open(file_path, mode="w", **meta) as dst: - dst.write(arr_ohe) - - new_raster = Raster(file_path) - new_raster.rename({old: new for old, new in zip(new_raster.names, names)}) - - return new_raster - - -def xy_coordinates(layer, file_path, driver="GTiff"): - """ - Fill 2d arrays with their x,y indices. - - Parameters - ---------- - layer : pyspatialml.RasterLayer, or rasterio.DatasetReader - RasterLayer to use as a template. - - file_path : str - File path to save to the resulting Raster object.s - - driver : str, options. Default is 'GTiff' - GDAL driver to use to save raster. 
- - Returns - ------- - pyspatialml.Raster object - """ - - arr = np.zeros(layer.shape, dtype=np.float32) - arr = arr[np.newaxis, :, :] - xyarrays = np.repeat(arr[0:1, :, :], 2, axis=0) - xx, xy = np.meshgrid(np.arange(arr.shape[2]), np.arange(arr.shape[1])) - xyarrays[0, :, :] = xx - xyarrays[1, :, :] = xy - - # create new stack - meta = deepcopy(layer.meta) - meta["driver"] = driver - meta["count"] = 2 - meta["dtype"] = xyarrays.dtype - - with rasterio.open(file_path, "w", **meta) as dst: - dst.write(xyarrays) - - new_raster = Raster(file_path) - names = ["x_coordinates", "y_coordinates"] - new_raster.rename( - {old: new for old, new in zip(new_raster.names, names)}, - in_place=True - ) - - return new_raster - - -def rotated_coordinates(layer, file_path, n_angles=8, driver="GTiff"): - """Generate 2d arrays with n_angles rotated coordinates. - - Parameters - ---------- - layer : pyspatialml.RasterLayer, or rasterio.DatasetReader - RasterLayer to use as a template. - - n_angles : int, optional. Default is 8 - Number of angles to rotate coordinate system by. - - driver : str, optional. Default is 'GTiff' - GDAL driver to use to save raster. 
- - Returns - ------- - pyspatialml.Raster - """ - # define x and y grid dimensions - xmin, ymin, xmax, ymax = 0, 0, layer.shape[1], layer.shape[0] - x_range = np.arange(start=xmin, stop=xmax, step=1) - y_range = np.arange(start=ymin, stop=ymax, step=1, dtype=np.float32) - - X_var, Y_var, _ = np.meshgrid(x_range, y_range, n_angles) - angles = np.deg2rad(np.linspace(0, 180, n_angles, endpoint=False)) - grids_directional = X_var + np.tan(angles) * Y_var - - # reorder to band, row, col order - grids_directional = grids_directional.transpose((2, 0, 1)) - - # create new stack - meta = deepcopy(layer.meta) - meta["driver"] = driver - meta["count"] = n_angles - meta["dtype"] = grids_directional.dtype - with rasterio.open(file_path, "w", **meta) as dst: - dst.write(grids_directional) - - new_raster = Raster(file_path) - names = ["angle_" + str(i + 1) for i in range(n_angles)] - new_raster.rename({old: new for old, new in zip(new_raster.names, names)}, - in_place=True) - - return new_raster - - -def distance_to_corners(layer, file_path, driver="GTiff"): - """Generate buffer distances to corner and centre coordinates of raster - extent. - - Parameters - ---------- - layer : pyspatialml.RasterLayer, or rasterio.DatasetReader - - file_path : str - File path to save to the resulting Raster object - - driver : str, optional. Default is 'GTiff' - GDAL driver to use to save raster. 
- - Returns - ------- - pyspatialml.Raster object - """ - - names = ["top_left", "top_right", "bottom_left", "bottom_right", "centre_indices"] - - rows = np.asarray( - [0, 0, layer.shape[0] - 1, layer.shape[0] - 1, int(layer.shape[0] / 2)] - ) - cols = np.asarray( - [0, layer.shape[1] - 1, 0, layer.shape[1] - 1, int(layer.shape[1] / 2)] - ) - - # euclidean distances - arr = _grid_distance(layer.shape, rows, cols) - - # create new stack - meta = deepcopy(layer.meta) - meta["driver"] = driver - meta["count"] = 5 - meta["dtype"] = arr.dtype - - with rasterio.open(file_path, "w", **meta) as dst: - dst.write(arr) - - new_raster = Raster(file_path) - new_raster.rename({old: new for old, new in zip(new_raster.names, names)}, - in_place=True) - - return new_raster - - -def _grid_distance(shape, rows, cols): - """Generate buffer distances to x,y coordinates. - - Parameters - ---------- - shape : tuple - shape of numpy array (rows, cols) to create buffer distances within. - rows : 1d numpy array - array of row indexes. - cols : 1d numpy array - array of column indexes. - - Returns - ------- - ndarray - 3d numpy array of euclidean grid distances to each x,y coordinate pair - [band, row, col]. - """ - - # create buffer distances - grids_buffers = np.zeros((shape[0], shape[1], rows.shape[0]), dtype=np.float32) - - for i, (y, x) in enumerate(zip(rows, cols)): - # create 2d array (image) with pick indexes set to z - point_arr = np.zeros((shape[0], shape[1])) - point_arr[y, x] = 1 - buffer = ndimage.morphology.distance_transform_edt(1 - point_arr) - grids_buffers[:, :, i] = buffer - - # reorder to band, row, column - grids_buffers = grids_buffers.transpose((2, 0, 1)) - - return grids_buffers - - -def distance_to_samples(layer, file_path, rows, cols, driver="GTiff"): - """Generate buffer distances to x,y coordinates. - - Parameters - ---------- - layer : pyspatialml.RasterLayer, or rasterio.DatasetReader - RasterLayer to use as a template. 
- - file_path : str - File path to save to the resulting Raster object. - - rows : 1d numpy array - array of row indexes. - - cols : 1d numpy array - array of column indexes. - - driver : str, default='GTiff' - GDAL driver to use to save raster. - - Returns - ------- - pyspatialml.Raster object - """ - # some checks - if isinstance(rows, list): - rows = np.asarray(rows) - - if isinstance(cols, list): - cols = np.asarray(cols) - - if rows.shape != cols.shape: - raise ValueError("rows and cols must have same dimensions") - - shape = layer.shape - arr = _grid_distance(shape, rows, cols) - - # create new stack - meta = deepcopy(layer.meta) - meta["driver"] = driver - meta["count"] = arr.shape[0] - meta["dtype"] = arr.dtype - - with rasterio.open(file_path, "w", **meta) as dst: - dst.write(arr) - - names = ["dist_sample" + str(i + 1) for i in range(len(rows))] - new_raster = Raster(file_path) - new_raster.rename({old: new for old, new in zip(new_raster.names, names)}, - in_place=True) - - return new_raster +from copy import deepcopy + +import numpy as np +import rasterio +from scipy import ndimage + +from .raster import Raster + + +def one_hot_encode(layer, file_path, categories=None, driver="GTiff"): + """One-hot encoding of a RasterLayer. + + Parameters + ---------- + layer : pyspatialml.RasterLayer + Containing categories to perform one-hot encoding on. + + file_path : str + File path to save one-hot encoded raster. + + categories : list, ndarray, optional + Optional list of categories to extract. Default performs one-hot + encoding on all categorical values in the input layer. + + driver : str, options. Default is 'GTiff' + GDAL-compatible driver. + + Returns + ------- + pyspatialml.Raster + Each categorical value is encoded as a layer with a Raster object. 
+ """ + arr = layer.read(masked=True) + + if categories is None: + categories = np.unique(arr) + categories = categories[~categories.mask] + categories = categories.data.astype("int32") + + arr_ohe = np.ma.zeros((len(categories), arr.shape[0], arr.shape[1]), dtype="int32") + names = [] + prefix = layer.names[0] + + for i, cat in enumerate(categories): + enc = deepcopy(arr) + enc[enc != cat] = 0 + enc[enc == cat] = 1 + arr_ohe[i, :, :] = enc + + names.append("_".join([prefix, "cat", str(cat)])) + + # create new stack + meta = deepcopy(layer.ds.meta) + meta["driver"] = driver + meta["nodata"] = -99999 + meta["count"] = arr_ohe.shape[0] + meta["dtype"] = "int32" + + with rasterio.open(file_path, mode="w", **meta) as dst: + dst.write(arr_ohe) + + new_raster = Raster(file_path) + new_raster.rename({old: new for old, new in zip(new_raster.names, names)}) + + return new_raster + + +def xy_coordinates(layer, file_path, driver="GTiff"): + """ + Fill 2d arrays with their x,y indices. + + Parameters + ---------- + layer : pyspatialml.RasterLayer, or rasterio.DatasetReader + RasterLayer to use as a template. + + file_path : str + File path to save to the resulting Raster object.s + + driver : str, options. Default is 'GTiff' + GDAL driver to use to save raster. 
+ + Returns + ------- + pyspatialml.Raster object + """ + + arr = np.zeros(layer.shape, dtype=np.float32) + arr = arr[np.newaxis, :, :] + xyarrays = np.repeat(arr[0:1, :, :], 2, axis=0) + xx, xy = np.meshgrid(np.arange(arr.shape[2]), np.arange(arr.shape[1])) + xyarrays[0, :, :] = xx + xyarrays[1, :, :] = xy + + # create new stack + meta = deepcopy(layer.meta) + meta["driver"] = driver + meta["count"] = 2 + meta["dtype"] = xyarrays.dtype + + with rasterio.open(file_path, "w", **meta) as dst: + dst.write(xyarrays) + + new_raster = Raster(file_path) + names = ["x_coordinates", "y_coordinates"] + new_raster.rename( + {old: new for old, new in zip(new_raster.names, names)}, + in_place=True + ) + + return new_raster + + +def rotated_coordinates(layer, file_path, n_angles=8, driver="GTiff"): + """Generate 2d arrays with n_angles rotated coordinates. + + Parameters + ---------- + layer : pyspatialml.RasterLayer, or rasterio.DatasetReader + RasterLayer to use as a template. + + n_angles : int, optional. Default is 8 + Number of angles to rotate coordinate system by. + + driver : str, optional. Default is 'GTiff' + GDAL driver to use to save raster. 
+ + Returns + ------- + pyspatialml.Raster + """ + # define x and y grid dimensions + xmin, ymin, xmax, ymax = 0, 0, layer.shape[1], layer.shape[0] + x_range = np.arange(start=xmin, stop=xmax, step=1) + y_range = np.arange(start=ymin, stop=ymax, step=1, dtype=np.float32) + + X_var, Y_var, _ = np.meshgrid(x_range, y_range, n_angles) + angles = np.deg2rad(np.linspace(0, 180, n_angles, endpoint=False)) + grids_directional = X_var + np.tan(angles) * Y_var + + # reorder to band, row, col order + grids_directional = grids_directional.transpose((2, 0, 1)) + + # create new stack + meta = deepcopy(layer.meta) + meta["driver"] = driver + meta["count"] = n_angles + meta["dtype"] = grids_directional.dtype + with rasterio.open(file_path, "w", **meta) as dst: + dst.write(grids_directional) + + new_raster = Raster(file_path) + names = ["angle_" + str(i + 1) for i in range(n_angles)] + new_raster.rename({old: new for old, new in zip(new_raster.names, names)}, + in_place=True) + + return new_raster + + +def distance_to_corners(layer, file_path, driver="GTiff"): + """Generate buffer distances to corner and centre coordinates of raster + extent. + + Parameters + ---------- + layer : pyspatialml.RasterLayer, or rasterio.DatasetReader + + file_path : str + File path to save to the resulting Raster object + + driver : str, optional. Default is 'GTiff' + GDAL driver to use to save raster. 
+ + Returns + ------- + pyspatialml.Raster object + """ + + names = ["top_left", "top_right", "bottom_left", "bottom_right", "centre_indices"] + + rows = np.asarray( + [0, 0, layer.shape[0] - 1, layer.shape[0] - 1, int(layer.shape[0] / 2)] + ) + cols = np.asarray( + [0, layer.shape[1] - 1, 0, layer.shape[1] - 1, int(layer.shape[1] / 2)] + ) + + # euclidean distances + arr = _grid_distance(layer.shape, rows, cols) + + # create new stack + meta = deepcopy(layer.meta) + meta["driver"] = driver + meta["count"] = 5 + meta["dtype"] = arr.dtype + + with rasterio.open(file_path, "w", **meta) as dst: + dst.write(arr) + + new_raster = Raster(file_path) + new_raster.rename({old: new for old, new in zip(new_raster.names, names)}, + in_place=True) + + return new_raster + + +def _grid_distance(shape, rows, cols): + """Generate buffer distances to x,y coordinates. + + Parameters + ---------- + shape : tuple + shape of numpy array (rows, cols) to create buffer distances within. + rows : 1d numpy array + array of row indexes. + cols : 1d numpy array + array of column indexes. + + Returns + ------- + ndarray + 3d numpy array of euclidean grid distances to each x,y coordinate pair + [band, row, col]. + """ + + # create buffer distances + grids_buffers = np.zeros((shape[0], shape[1], rows.shape[0]), dtype=np.float32) + + for i, (y, x) in enumerate(zip(rows, cols)): + # create 2d array (image) with pick indexes set to z + point_arr = np.zeros((shape[0], shape[1])) + point_arr[y, x] = 1 + buffer = ndimage.morphology.distance_transform_edt(1 - point_arr) + grids_buffers[:, :, i] = buffer + + # reorder to band, row, column + grids_buffers = grids_buffers.transpose((2, 0, 1)) + + return grids_buffers + + +def distance_to_samples(layer, file_path, rows, cols, driver="GTiff"): + """Generate buffer distances to x,y coordinates. + + Parameters + ---------- + layer : pyspatialml.RasterLayer, or rasterio.DatasetReader + RasterLayer to use as a template. 
+ + file_path : str + File path to save to the resulting Raster object. + + rows : 1d numpy array + array of row indexes. + + cols : 1d numpy array + array of column indexes. + + driver : str, default='GTiff' + GDAL driver to use to save raster. + + Returns + ------- + pyspatialml.Raster object + """ + # some checks + if isinstance(rows, list): + rows = np.asarray(rows) + + if isinstance(cols, list): + cols = np.asarray(cols) + + if rows.shape != cols.shape: + raise ValueError("rows and cols must have same dimensions") + + shape = layer.shape + arr = _grid_distance(shape, rows, cols) + + # create new stack + meta = deepcopy(layer.meta) + meta["driver"] = driver + meta["count"] = arr.shape[0] + meta["dtype"] = arr.dtype + + with rasterio.open(file_path, "w", **meta) as dst: + dst.write(arr) + + names = ["dist_sample" + str(i + 1) for i in range(len(rows))] + new_raster = Raster(file_path) + new_raster.rename({old: new for old, new in zip(new_raster.names, names)}, + in_place=True) + + return new_raster diff --git a/pyspatialml/raster.py b/pyspatialml/raster.py index 1ed1e58..85a90a9 100644 --- a/pyspatialml/raster.py +++ b/pyspatialml/raster.py @@ -1,2691 +1,2691 @@ -import os -import tempfile -from collections import namedtuple -from collections.abc import ValuesView -from functools import partial -from typing import Tuple -import affine - -import geopandas as gpd -import numpy as np -import pandas as pd -import rasterio -import rasterio.mask -import rasterio.plot -from rasterio import features -from rasterio.io import MemoryFile -from rasterio.sample import sample_gen -from rasterio.warp import calculate_default_transform, reproject -from rasterio.windows import Window -from rasterio.transform import rowcol -from shapely.geometry import Point -from tqdm import tqdm -from collections import Counter - -from ._plotting import RasterPlotMixin -from ._prediction import ( - predict_multioutput, - predict_output, - predict_prob, - stack_constants, -) -from ._utils import 
get_nodata_value -from .rasterlayer import RasterLayer -from ._rasterstats import RasterStatsMixin -from .locindexer import _LocIndexer - - -class Raster(_LocIndexer, RasterStatsMixin, RasterPlotMixin): - """Creates a collection of file-based GDAL-supported raster - datasets that share a common coordinate reference system and - geometry. - - Raster objects encapsulate RasterLayer objects, which represent - single band raster datasets that can physically be represented by - either separate single-band raster files, multi-band raster files, - or any combination of individual bands from multi-band raster and - single-band raster datasets. - - Attributes - ---------- - files : list - A list of the raster dataset files that are used in the Raster. - This does not have to be the same length as the number of - RasterLayers because some files may have multiple bands. - - meta : dict - A dict containing the raster metadata. The dict contains the - following keys/values: - - crs : the crs object - transform : the Affine.affine transform object - width : width of the Raster in pixels - height : height of the Raster in pixels - count : number of RasterLayers within the Raster - dtype : the numpy datatype that represents lowest common - denominator of the different dtypes for all of the layers - in the Raster. - - names : list - A list of the RasterLayer names. - - block_shape : tuple - The default block_shape in (rows, cols) for reading windows of data - in the Raster for out-of-memory processing. 
- """ - - def __init__( - self, - src, - crs=None, - transform=None, - nodata=None, - file_path=None, - driver=None, - tempdir=tempfile.tempdir, - in_memory=False, - ): - """Initiate a new Raster object - - Parameters - ---------- - src : file path, RasterLayer, rasterio dataset, or a ndarray - Initiate a Raster object from any combination of a file - path or list of file paths to GDAL-supported raster - datasets, RasterLayer objects, or directly from a rasterio - dataset or band object that is opened in 'r' or 'rw' mode. - - A Raster object can also be created directly from a numpy - array in [band, rows, cols] order. The additional arguments - `crs` and `transform` should also be provided to supply - spatial coordinate information. - - crs : rasterio.crs.CRS object (optional, default is None) - CRS object containing projection information for data if - provided as an array. - - transform : affine.Affine object (optional, default is None) - Affine object containing transform information for data if - provided as an array. - - nodata : any number (optional, default is None) - Assign a nodata value to the Raster dataset when `src` is - a ndarray. If a nodata value is not specified then it is - determined based on the minimum permissible value for the - array's data type. - - file_path : str (optional, default None) - Path to save new Raster object if created from an array. - - driver : str (optional, default=None) - A GDAL compatible driver to use when initiating a raster - from a numpy array. - - tempdir : str, default is tempfile.tempdir - Path to a directory to store temporary files that are - produced during geoprocessing operations. - - in_memory : bool, default is False - Whether to initiate the Raster from an array and store the - data in-memory using Rasterio's in-memory files. - - Returns - ------- - pyspatialml.Raster - Raster object containing the src layers stacked into a - single object. 
- """ - self.files = list() - self.meta = None - self._block_shape = (256, 256) - self.tempdir = tempdir - self._internal = frozenset( - ["_internal", "files", "meta", "_block_shape", "tempdir"] - ) - - src_layers = [] - - # get temporary file name if file_path is None - if file_path is None and isinstance(src, np.ndarray): - file_path, tfile = self._tempfile(file_path) - driver = "GTiff" - - # initiate from numpy array - if isinstance(src, np.ndarray): - if src.ndim == 2: - src = src[np.newaxis] - count, height, width = src.shape - - if in_memory is True: - memfile = MemoryFile() - dst = memfile.open( - height=height, - width=width, - count=count, - driver=driver, - dtype=src.dtype, - crs=crs, - transform=transform, - nodata=nodata, - ) - dst.write(src) - else: - with rasterio.open( - file_path, - mode="w", - driver=driver, - height=height, - width=width, - count=count, - dtype=src.dtype, - crs=crs, - transform=transform, - nodata=nodata, - ) as dst: - dst.write(src) - dst = rasterio.open(file_path, "r") - - for i in range(dst.count): - band = rasterio.band(dst, i + 1) - rasterlayer = RasterLayer(band) - if in_memory is True: - rasterlayer.in_memory = True - src_layers.append(rasterlayer) - - if tfile is not None and in_memory is False: - for layer in src_layers: - layer._close = tfile.close - self._layers = src_layers - return - - # from a single file path - elif isinstance(src, str): - src_layers = [] - r = rasterio.open(src, mode="r", driver=driver) - for i in range(r.count): - band = rasterio.band(r, i + 1) - src_layers.append(RasterLayer(band)) - self._layers = src_layers - return - - # from a single RasterLayer - elif isinstance(src, RasterLayer): - self._layers = src - self._rename_inplace(list(self.names)[0], src.name) - return - - # from a single Raster - elif isinstance(src, Raster): - self._layers = [i for i in src.values()] - for old, new in zip(self.names, list(src.names)): - self._rename_inplace(old, new) - return - - # from a single 
rasterio.io.datasetreader/writer - elif isinstance(src, rasterio.io.DatasetReader): - src_layers = [] - for i in range(src.count): - band = rasterio.band(src, i + 1) - src_layers.append(RasterLayer(band)) - self._layers = src_layers - return - - # from a single rasterio.band object - elif isinstance(src, rasterio.Band): - self._layers = RasterLayer(src) - return - - # from a list of objects - elif isinstance(src, list): - # list of file paths (str) - if all(isinstance(x, str) for x in src): - src_layers = [] - for f in src: - r = rasterio.open(f, mode="r", driver=driver) - for i in range(r.count): - band = rasterio.band(r, i + 1) - src_layers.append(RasterLayer(band)) - - self._layers = src_layers - return - - # list of RasterLayer objects - elif all(isinstance(x, RasterLayer) for x in src): - self._layers = src - for old, new in zip(self.names, src): - self._rename_inplace(old, new.name) - return - - # list of rasterio.io.datasetreader objects - elif all(isinstance(x, rasterio.io.DatasetReader) for x in src): - src_layers = [] - for r in src: - for i in range(r.count): - band = rasterio.band(r, i + 1) - src_layers.append(RasterLayer(band)) - self._layers = src_layers - return - - # from a list of rasterio.band objects - elif all(isinstance(x, rasterio.Band) for x in src): - src_layers = [] - for band in src: - src_layers.append(RasterLayer(band)) - self._layers = src_layers - return - else: - raise ValueError("Cannot create a Raster object from a mixture of inputs") - - @property - def block_shape(self) -> Tuple[int, int]: - """Return the block shape in (height, width) used to read windows from the - Raster - """ - return self._block_shape - - @block_shape.setter - def block_shape(self, value) -> None: - if not isinstance(value, tuple): - raise ValueError( - "block_shape must be set using an integer tuple as (rows, " "cols)" - ) - rows, cols = value - - if not isinstance(rows, int) or not isinstance(cols, int): - raise ValueError( - "tuple must consist of integer 
values referring to number of " - "rows, cols" - ) - self._block_shape = (rows, cols) - - def set_block_shape(self, value) -> None: - """Set the block shape of the raster, i.e. the height and width - of windows to read in chunks for the predict, predict_proba, - apply, and other supported-methods. - - Note block shape can also be set with `myraster.block_shape = (500, 500)` - - Parameters - ---------- - value : tuple - A tuple of (height, width) for the block window - """ - self.block_shape = value - - @property - def count(self) -> int: - """Return the number of layers in the Raster""" - return len(self.loc) - - @property - def crs(self) -> rasterio.crs.CRS: - """Return to crs of the Raster""" - return self.meta["crs"] - - @crs.setter - def crs(self, value) -> None: - self.meta["crs"] = value - - @property - def transform(self) -> affine.Affine: - """Return the transform of the Raster""" - return self.meta["transform"] - - @transform.setter - def transform(self, value) -> None: - self.meta["transform"] = value - - @property - def width(self) -> int: - """Return the width (number of columns) in the Raster""" - return self.meta["width"] - - @property - def height(self) -> int: - """Return the height (number of rows) in the Raster""" - return self.meta["height"] - - @property - def shape(self) -> Tuple[int, int]: - """Return the shape (height, width) of the Raster""" - return self.height, self.width - - @property - def res(self) -> Tuple[float, float]: - """Return a tuple of the resolution of the Raster in (width, height)""" - return abs(self.meta["transform"].a), abs(self.meta["transform"].e) - - @property - def bounds(self) -> namedtuple: - """Return the bounding box of the raster in (left, bottom, right, top)""" - bounds = rasterio.transform.array_bounds( - self.height, self.width, self.transform - ) - BoundingBox = namedtuple("BoundingBox", ["left", "bottom", "right", "top"]) - return BoundingBox(bounds[0], bounds[1], bounds[2], bounds[3]) - - @property - def 
dtypes(self) -> list: - """Return the dtype of each layer in the Raster as a list""" - dtypes = list() - - for layer in self.loc.values(): - dtypes.append(layer.dtype) - - return dtypes - - @property - def nodatavals(self) -> list: - """Return the nodata value of each layer in the Raster as a list""" - nodatavals = list() - - for layer in self.loc.values(): - try: - nodatavals.append(layer.nodata) - except: - nodatavals.append(None) - - return nodatavals - - @property - def _layers(self) -> dict: - return self.loc - - @_layers.setter - def _layers(self, layers) -> None: - """Assign RasterLayer objects to the Raster - - The function assigns the layers to the loc indexer, updates - the `files` attribute and assigns syntactically-correct names - to each layer. - - Parameters - ---------- - layers : list - A list of pyspatialml.RasterLayer objects - """ - if isinstance(layers, RasterLayer): - layers = [layers] - - if all(isinstance(x, type(layers[0])) for x in layers) is False: - raise ValueError("Cannot create a Raster object from a mixture of inputs") - - meta = self._check_alignment(layers) - - if meta is False: - raise ValueError( - "Raster datasets do not have the same dimensions/transform" - ) - - # reset locindexer - self.files = list() - for key in self.loc.keys(): - self.loc.pop(key) - - # update global Raster object attributes with new values - names = [i.name for i in layers] - names = self._fix_names(names) - - # update attributes per dataset - for layer, name in zip(layers, names): - self.files.append(layer.file) - layer.name = name - self.loc[name] = layer - - self.meta = dict( - crs=meta["crs"], - transform=meta["transform"], - width=meta["width"], - height=meta["height"], - count=self.count, - dtype=np.result_type(*self.dtypes), - ) - - @staticmethod - def _fix_names(combined_names): - """Adjusts the names of pyspatialml.RasterLayer objects within the - Raster when appending new layers. 
- - This avoids the Raster object containing duplicated names in the - case that multiple RasterLayers are appended with the same name. - - In the case of duplicated names, the RasterLayer names are appended - with a `_n` with n = 1, 2, 3 .. n. - - Parameters - ---------- - combined_names : list - List of str representing names of RasterLayers. Any duplicates - will have a suffix appended to them. - - Returns - ------- - list - List with adjusted names - """ - counts = Counter(combined_names) - - for s, num in counts.items(): - if num > 1: - for suffix in range(1, num + 1): - if s + "_" + str(suffix) not in combined_names: - combined_names[combined_names.index(s)] = s + "_" + str(suffix) - else: - i = 1 - while s + "_" + str(i) in combined_names: - i += 1 - combined_names[combined_names.index(s)] = s + "_" + str(i) - - return combined_names - - @staticmethod - def _check_alignment(layers): - """Check that a list of raster datasets are aligned with the same - pixel dimensions and geotransforms. - - Parameters - ---------- - layers : list - List of pyspatialml.RasterLayer objects. - - Returns - ------- - dict or False - Dict of metadata if all layers are spatially aligned, otherwise - returns False. - """ - - src_meta = [] - for layer in layers: - src_meta.append(layer.ds.meta.copy()) - - if not all(i["crs"] == src_meta[0]["crs"] for i in src_meta): - Warning("crs of all rasters does not match, possible unintended consequences") - - if not all( - [ - i["height"] == src_meta[0]["height"] - or i["width"] == src_meta[0]["width"] - or i["transform"] == src_meta[0]["transform"] - for i in src_meta - ] - ): - return False - - else: - return src_meta[0] - - def _check_supported_dtype(self, dtype=None) -> str: - """Method to check that a dtype is compatible with GDAL or - generate a compatible dtype from an array - - Parameters - ---------- - dtype : str, dtype, ndarray or None - Pass a dtype (as a string or dtype) to check compatibility. 
- Pass an array to generate a compatible dtype from the - array. Pass None to use the existing dtype of the parent - Raster object. - - Returns - ------- - dtype : dtype - GDAL compatible dtype - """ - if dtype is None: - dtype = self.meta["dtype"] - - elif isinstance(dtype, np.ndarray): - dtype = rasterio.dtypes.get_minimum_dtype(dtype) - - else: - if rasterio.dtypes.check_dtype(dtype) is False: - raise AttributeError( - "{dtype} is not a support GDAL dtype".format(dtype=dtype) - ) - - return dtype - - def _tempfile(self, file_path) -> Tuple[str, str]: - """Returns a TemporaryFileWrapper and file path if a file_path - parameter is None - """ - if file_path is None: - if os.name != "nt": - tfile = tempfile.NamedTemporaryFile(dir=self.tempdir, suffix=".tif") - file_path = tfile.name - else: - tfile = TempRasterLayer() - file_path = tfile.name - - else: - tfile = None - - return file_path, tfile - - def _copy(self, src, names=None): - """Return a new Raster object from a list of files but - retaining the attributes of the parent Raster. - - Designed to be used internally to copy a Raster object. - - Parameters - ---------- - src : List of RasterLayers or file paths - List of RasterLayers or file paths used create the new - Raster object. - - names : list (optional, default None) - List to name the RasterLayer objects in the stack. If not - supplied then the names will be generated from the file - names. 
- - Returns - ------- - pyspatialml.Raster - """ - if not isinstance(src, (list, ValuesView)): - src = [src] - - raster = Raster(src) - - # rename and copy attributes - if names is not None: - for (old, new) in zip(raster.names, names): - raster._rename_inplace(old, new) - - for old_layer, new_layer in zip(self.loc.values(), list(raster.loc.values())): - new_layer.cmap = old_layer.cmap - new_layer.norm = old_layer.norm - new_layer.categorical = old_layer.categorical - - raster.block_shape = self.block_shape - - return raster - - @staticmethod - def _apply_transformer(img, transformer): - img = np.ma.masked_invalid(img) - mask = img.mask.copy() - - # reshape into 2D array - n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] - flat_pixels = img.reshape((rows * cols, n_features)) - flat_pixels = flat_pixels.filled(0) - - # predict and replace mask - result = transformer.transform(flat_pixels) - - # reshape the prediction from a 1D into 3D array [band, row, col] - result = result.reshape((n_features, rows, cols)) - result = np.ma.masked_array(data=result, mask=mask, copy=True) - - return result - - def head(self) -> np.ndarray: - """Return the first 10 rows from the Raster as a ndarray""" - window = Window(col_off=0, row_off=0, width=20, height=10) - return self.read(window=window) - - def tail(self) -> np.ndarray: - """Return the last 10 rows from the Raster as a ndarray""" - window = Window( - col_off=self.width - 20, row_off=self.height - 10, width=20, height=10 - ) - return self.read(window=window) - - def close(self) -> None: - """Close all of the RasterLayer objects in the Raster. - - Note that this will cause any rasters based on temporary files - to be removed. This is intended as a method of clearing - temporary files that may have accumulated during an analysis - session. 
- """ - for layer in self.loc.values(): - layer.close() - - def copy(self, subset=None): - """Creates a shallow copy of a Raster object - - Note that shallow in the context of a Raster object means that - an immutable copy of the object is made, however the on-disk and - in-memory file locations remain the same. - - Parameters - ---------- - subset : opt - A list of layer names to subset while copying. - - Returns - ------- - Raster - """ - if subset is not None: - if isinstance(subset, str): - subset = [subset] - layers = list(self.loc[subset].values()) - else: - layers = list(self.loc.values()) - - return self._copy(layers) - - def block_shapes(self, rows, cols): - """Generator for windows for optimal reading and writing based - on the raster format Windows and returns as a tuple with xoff, - yoff, width, height. - - Parameters - ---------- - rows : int - Height of window in rows. - - cols : int - Width of window in columns. - """ - for i, col in enumerate(range(0, self.width, cols)): - if col + cols < self.width: - num_cols = cols - else: - num_cols = self.width - col - - for j, row in enumerate(range(0, self.height, rows)): - if row + rows < self.height: - num_rows = rows - else: - num_rows = self.height - row - - yield Window(col, row, num_cols, num_rows) - - def read( - self, - masked=False, - window=None, - out_shape=None, - resampling="nearest", - as_df=False, - **kwargs - ) -> np.ndarray: - """Reads data from the Raster object into a numpy array. - - Parameters - ---------- - masked : bool (default False) - Read data into a masked array. - - window : rasterio.window.Window object (optional, default None) - Tuple of col_off, row_off, width, height of a window of - data to read a chunk of data into a ndarray. - - out_shape : tuple (optional, default None) - Shape of shape of array (rows, cols) to read data into - using decimated reads. 
- - resampling : str (default 'nearest') - Resampling method to use when applying decimated reads when - out_shape is specified. Supported methods are: 'average', - 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', - 'max', 'med', 'min', 'mode', 'q1', 'q3'. - - as_df : bool (default False) - Whether to return the data as a pandas.DataFrame with - columns named by the RasterLayer names. - - **kwargs : dict - Other arguments to pass to rasterio.DatasetReader.read method - - Returns - ------- - ndarray - Raster values in 3d ndarray with the dimensions in order - of (band, row, and column). - """ - dtype = self.meta["dtype"] - - # get window to read from window or height/width of dataset - if window is None: - width = self.width - height = self.height - else: - width = window.width - height = window.height - - # decimated reads using nearest neighbor resampling - if out_shape: - height, width = out_shape - - # read bands separately into numpy array - if masked is True: - arr = np.ma.zeros((self.count, height, width), dtype=dtype) - else: - arr = np.zeros((self.count, height, width), dtype=dtype) - - for i, layer in enumerate(self.loc.values()): - arr[i, :, :] = layer.read( - masked=masked, - window=window, - out_shape=out_shape, - resampling=resampling, - **kwargs - ) - - if masked is True: - arr[i, :, :] = np.ma.MaskedArray( - data=arr[i, :, :], mask=np.isfinite(arr[i, :, :]).mask - ) - - if as_df is True: - # reshape to rows, cols, bands - arr = arr.transpose(1, 2, 0) - arr_flat = arr.reshape((arr.shape[0] * arr.shape[1], arr.shape[2])) - df = pd.DataFrame(data=arr_flat, columns=self.names) - return df - - return arr - - def write( - self, file_path, driver="GTiff", dtype=None, nodata=None, **kwargs - ): - """Write the Raster object to a file. - - Overrides the write RasterBase class method, which is a partial - function of the rasterio.DatasetReader.write method. - - Parameters - ---------- - file_path : str - File path used to save the Raster object. 
- - driver : str (default is 'GTiff'). - Name of GDAL driver used to save Raster data. - - dtype : str (opt, default None) - Optionally specify a numpy compatible data type when - saving to file. If not specified, a data type is selected - based on the data types of RasterLayers in the Raster - object. - - nodata : any number (opt, default None) - Optionally assign a new nodata value when saving to file. - If not specified a nodata value based on the minimum - permissible value for the data types of RasterLayers in the - Raster object is used. Note that this does not change the - pixel nodata values of the raster, it only changes the - metadata of what value represents a nodata pixel. - - kwargs : opt - Optional named arguments to pass to the format drivers. - For example can be `compress="deflate"` to add compression. - - Returns - ------- - Raster - New Raster object from saved file. - """ - dtype = self._check_supported_dtype(dtype) - - if nodata is None: - nodata = get_nodata_value(dtype) - - meta = self.meta.copy() - meta["driver"] = driver - meta["nodata"] = nodata - meta["dtype"] = dtype - meta.update(kwargs) - - with rasterio.open(file_path, mode="w", **meta) as dst: - - for i, layer in enumerate(self.loc.values()): - arr = layer.read() - arr[arr == layer.nodata] = nodata - dst.write(arr.astype(dtype), i + 1) - - return self._copy(file_path, self.names) - - def predict_proba( - self, - estimator, - file_path=None, - in_memory=False, - indexes=None, - driver="GTiff", - dtype=None, - nodata=None, - constants=None, - progress=False, - **kwargs - ): - """Apply class probability prediction of a scikit learn model to a Raster. - - Parameters - ---------- - estimator : estimator object implementing 'fit' - The object to use to fit the data. - - file_path : str (optional, default None) - Path to a GeoTiff raster for the prediction results. If not - specified then the output is written to a temporary file. 
- - in_memory : bool, default is False - Whether to initiated the Raster from an array and store the - data in-memory using Rasterio's in-memory files. - - indexes : list of integers (optional, default None) - List of class indices to export. In some circumstances, - only a subset of the class probability estimations are - desired, for instance when performing a binary - classification only the probabilities for the positive - class may be desired. - - driver : str (default 'GTiff') - Named of GDAL-supported driver for file export. - - dtype : str (optional, default None) - Optionally specify a GDAL compatible data type when saving - to file. If not specified, a data type is set based on the - data type of the prediction. - - nodata : any number (optional, default None) - Nodata value for file export. If not specified then the - nodata value is derived from the minimum permissible value - for the given data type. - - progress : bool (default False) - Show progress bar for prediction. - - constants: list-like object or a dict (optional, default None) - Constant features to add to the Raster object with each value - in a list or 1d ndarray representing an additional feature. - - If a list-like object of values os passed, then each numeric - value will be appended as constant features to the last - columns in the data. It is therefore important that all - features including constant features are present in the same - order as what was used to train the model. - - If a dict is passed, then the keys of the dict must refer to - the names of raster layers in the Raster object. In this case, - the values of the dict will replace the values of the raster - layers in the Raster object. - - kwargs : opt - Optional named arguments to pass to the format drivers. - For example can be `compress="deflate"` to add compression. - - Returns - ------- - Raster - Raster containing predicted class probabilities. Each - predicted class is represented by a RasterLayer object. 
- The RasterLayers are named `prob_n` for 1,2,3..n, with `n` - based on the index position of the classes, not the number - of the class itself. - - For example, a classification model predicting classes with - integer values of 1, 3, and 5 would result in three - RasterLayers named 'prob_1', 'prob_2' and 'prob_3'. - """ - # some checks - tfile = None - - if in_memory is False: - file_path, tfile = self._tempfile(file_path) - - # n_jobs = get_num_workers(n_jobs) - probfun = partial( - predict_prob, - estimator=estimator, - constants=constants, - names=list(self.names), - ) - - # perform test prediction - window = Window(0, 0, 1, 1) - img = self.read(masked=True, window=window) - img = np.ma.masked_invalid(img) - n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] - n_samples = rows * cols - flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) - flat_pixels = flat_pixels.filled(0) - - if constants is not None: - flat_pixels = stack_constants(flat_pixels, constants, list(self.names)) - - result = estimator.predict_proba(flat_pixels) - - if isinstance(indexes, int): - indexes = range(indexes, indexes + 1) - - elif indexes is None: - indexes = np.arange(0, result.shape[1]) - - # check dtype and nodata - if dtype is None: - dtype = self._check_supported_dtype(result) - else: - dtype = self._check_supported_dtype(dtype) - - if nodata is None: - nodata = get_nodata_value(dtype) - - # open output file with updated metadata - meta = self.meta.copy() - count = len(indexes) - meta.update(driver=driver, count=count, dtype=dtype, nodata=nodata) - meta.update(kwargs) - - # get windows - windows = [w for w in self.block_shapes(*self.block_shape)] - data_gen = ((w, self.read(window=w, masked=True)) for w in windows) - counter = tqdm(windows, disable=not progress, total=len(windows)) - - # apply prediction function - if in_memory is False: - with rasterio.open(file_path, "w", **meta) as dst: - for w, res, pbar in zip(windows, map(probfun, data_gen), 
counter): - res = np.ma.filled(res, fill_value=nodata) - dst.write(res[indexes, :, :].astype(dtype), window=w) - - output_dst = file_path - - else: - with MemoryFile() as memfile: - dst = memfile.open( - height=meta["height"], - width=meta["width"], - count=meta["count"], - dtype=meta["dtype"], - crs=meta["crs"], - transform=meta["transform"], - nodata=meta["nodata"], - driver=driver, - ) - - for w, res, pbar in zip(windows, map(probfun, data_gen), counter): - res = np.ma.filled(res, fill_value=nodata) - dst.write(res[indexes, :, :].astype(dtype), window=w) - - output_dst = [ - RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) - ] - - for i in output_dst: - i.in_memory = True - - # create new Raster object with the result - prefix = "prob_" - names = [prefix + str(i) for i in range(len(indexes))] - new_raster = self._copy(output_dst, names) - - # override close method - if tfile is not None: - for layer in new_raster.iloc: - layer._close = tfile.close - - return new_raster - - def predict( - self, - estimator, - file_path=None, - in_memory=False, - driver="GTiff", - dtype=None, - nodata=None, - progress=False, - constants=None, - **kwargs - ): - """Apply prediction of a scikit learn model to a Raster. - - The model can represent any scikit learn model or compatible - api with a `fit` and `predict` method. These can consist of - classification or regression models. Multi-class - classifications and multi-target regressions are also - supported. - - Parameters - ---------- - estimator : estimator object implementing 'fit' - The object to use to fit the data. - - file_path : str (optional, default None) - Path to a GeoTiff raster for the prediction results. If - not specified then the output is written to a temporary - file. - - in_memory : bool, default is False - Whether to initiated the Raster from an array and store - the data in-memory using Rasterio's in-memory files. 
- - driver : str (default 'GTiff') - Named of GDAL-supported driver for file export - - dtype : str (optional, default None) - Optionally specify a GDAL compatible data type when saving - to file. If not specified, np.float32 is assumed. - - nodata : any number (optional, default None) - Nodata value for file export. If not specified then the - nodata value is derived from the minimum permissible value - for the given data type. - - progress : bool (default False) - Show progress bar for prediction. - - constants: list-like object or a dict (optional, default None) - Constant features to add to the Raster object with each value - in a list or 1d ndarray representing an additional feature. - - If a list-like object of values os passed, then each numeric - value will be appended as constant features to the last - columns in the data. It is therefore important that all - features including constant features are present in the same - order as what was used to train the model. - - If a dict is passed, then the keys of the dict must refer to - the names of raster layers in the Raster object. In this case, - the values of the dict will replace the values of the raster - layers in the Raster object. - - kwargs : opt - Optional named arguments to pass to the format drivers. - For example can be `compress="deflate"` to add compression. - - Returns - ------- - Raster - Raster object containing prediction results as a - RasterLayers. For classification and regression models, the - Raster will contain a single RasterLayer, unless the model - is multi-class or multi-target. Layers are named - automatically as `pred_raw_n` with n = 1, 2, 3 ..n. 
- """ - tfile = None - - if in_memory is False: - file_path, tfile = self._tempfile(file_path) - - # n_jobs = get_num_workers(n_jobs) - - # determine output count for multi-class or multi-target cases - window = Window(0, 0, 1, 1) - img = self.read(masked=True, window=window) - img = np.ma.masked_invalid(img) - n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] - n_samples = rows * cols - flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) - flat_pixels = flat_pixels.filled(0) - - if constants is not None: - flat_pixels = stack_constants(flat_pixels, constants, list(self.names)) - - result = estimator.predict(flat_pixels) - - if result.ndim > 1: - n_outputs = result.shape[result.ndim - 1] - else: - n_outputs = 1 - - indexes = np.arange(0, n_outputs) - - # chose prediction function - if len(indexes) == 1: - if constants is not None: - predfun = partial( - predict_output, - estimator=estimator, - constants=constants, - names=list(self.names), - ) - else: - predfun = partial( - predict_output, - estimator=estimator, - constants=constants, - names=list(self.names), - ) - else: - predfun = partial( - predict_multioutput, - estimator=estimator, - constants=constants, - names=list(self.names), - ) - - # check dtype and nodata - if dtype is None: - dtype = self._check_supported_dtype(result) - else: - dtype = self._check_supported_dtype(dtype) - - if nodata is None: - nodata = get_nodata_value(dtype) - - # open output file with updated metadata - meta = self.meta.copy() - count = len(indexes) - meta.update(driver=driver, count=count, dtype=dtype, nodata=nodata) - meta.update(kwargs) - - # get windows - windows = [w for w in self.block_shapes(*self.block_shape)] - data_gen = ((w, self.read(window=w, masked=True)) for w in windows) - counter = tqdm(windows, disable=not progress, total=len(windows)) - - if in_memory is False: - with rasterio.open(file_path, "w", **meta) as dst: - for w, res, pbar in zip(windows, map(predfun, data_gen), 
counter): - res = np.ma.filled(res, fill_value=nodata) - dst.write(res[indexes, :, :].astype(dtype), window=w) - - output_dst = file_path - - else: - with MemoryFile() as memfile: - dst = memfile.open( - height=meta["height"], - width=meta["width"], - count=meta["count"], - dtype=meta["dtype"], - crs=meta["crs"], - driver=driver, - transform=meta["transform"], - nodata=meta["nodata"], - ) - - for w, res, pbar in zip(windows, map(predfun, data_gen), counter): - res = np.ma.filled(res, fill_value=nodata) - dst.write(res[indexes, :, :].astype(dtype), window=w) - - output_dst = [ - RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) - ] - - for i in output_dst: - i.in_memory = True - - # create new Raster object with the result - prefix = "pred_raw_" - names = [prefix + str(i) for i in range(len(indexes))] - new_raster = self._copy(output_dst, names) - - # override close method - if tfile is not None: - for layer in new_raster.iloc: - layer._close = tfile.close - - return new_raster - - def append(self, other, in_place=False): - """Method to add new RasterLayers to a Raster object. - - Note that this modifies the Raster object in-place by default. - - Parameters - ---------- - other : Raster object, or list of Raster objects - Object to append to the Raster. - - in_place : bool (default False) - Whether to change the Raster object in-place or leave - original and return a new Raster object. 
- - Returns - ------- - Raster - Returned only if `in_place` is False - """ - if isinstance(other, Raster): - other = [other] - - combined_names = self.names - combined_layers = list(self.loc.values()) - - for new_raster in other: - if not isinstance(new_raster, Raster): - raise AttributeError(new_raster + " is not a pyspatialml.Raster object") - - # check that other raster does not result in duplicated names - combined_names = list(combined_names) + list(new_raster.names) - combined_names = self._fix_names(combined_names) - - # update layers and names - combined_layers = combined_layers + list(new_raster.loc.values()) - - for layer, name in zip(combined_layers, combined_names): - layer.names = [name] - - if in_place is True: - self._layers = combined_layers - self.names = combined_names - else: - new_raster = self._copy(self.files, self.names) - new_raster._layers = combined_layers - return new_raster - - def drop(self, labels, in_place=False): - """Drop individual RasterLayers from a Raster object - - Note that this modifies the Raster object in-place by default. - - Parameters - --------- - labels : single label or list-like - Index (int) or layer name to drop. Can be a single integer - or label, or a list of integers or labels. - - in_place : bool (default False) - Whether to change the Raster object in-place or leave - original and return a new Raster object. 
- - Returns - ------- - pyspatialml.Raster - Returned only if `in_place` is True - """ - # convert single label to list - if isinstance(labels, (str, int)): - labels = [labels] - - # numerical index based subsetting - if len([i for i in labels if isinstance(i, int)]) == len(labels): - subset_layers = [ - v for (i, v) in enumerate(list(self.loc.values())) if i not in labels - ] - - # str label based subsetting - elif len([i for i in labels if isinstance(i, str)]) == len(labels): - subset_layers = [ - v - for (i, v) in enumerate(list(self.loc.values())) - if list(self.names)[i] not in labels - ] - - else: - raise ValueError( - "Cannot drop layers based on mixture of indexes and labels" - ) - - if in_place is True: - self._layers = subset_layers - else: - new_raster = self._copy(self.files, self.names) - new_raster._layers = subset_layers - - return new_raster - - def rename(self, names, in_place=False): - """Rename a RasterLayer within the Raster object. - - Parameters - ---------- - names : dict - dict of old_name : new_name - - in_place : bool (default False) - Whether to change names of the Raster object in-place or - leave original and return a new Raster object. - - Returns - ------- - pyspatialml.Raster - Returned only if `in_place` is False - """ - if in_place is True: - for old_name, new_name in names.items(): - self._rename_inplace(old_name, new_name) - else: - new_raster = self._copy(src=[v for (_, v) in self.items()]) - - for old_name, new_name in names.items(): - new_raster._rename_inplace(old_name, new_name) - - return new_raster - - def mask( - self, - shapes, - invert=False, - crop=True, - pad=False, - file_path=None, - in_memory=False, - driver="GTiff", - dtype=None, - nodata=None, - **kwargs - ): - """Mask a Raster object based on the outline of shapes in a - geopandas.GeoDataFrame - - Parameters - ---------- - shapes : geopandas.GeoDataFrame - GeoDataFrame containing masking features. 
- - invert : bool (default False) - If False then pixels outside shapes will be masked. If True - then pixels inside shape will be masked. - - crop : bool (default True) - Crop the raster to the extent of the shapes. - - pad : bool (default False) - If True, the features will be padded in each direction by - one half of a pixel prior to cropping raster. - - file_path : str (optional, default None) - File path to save to resulting Raster. If not supplied - then the resulting Raster is saved to a temporary file. - - in_memory : bool, default is False - Whether to initiated the Raster from an array and store - the data in-memory using Rasterio's in-memory files. - - driver : str (default 'GTiff') - Named of GDAL-supported driver for file export. - - dtype : str (optional, default None) - Coerce RasterLayers to the specified dtype. If not - specified then the cropped Raster is created using the - existing dtype, which usesa dtype that can accommodate the - data types of all of the individual RasterLayers. - - nodata : any number (optional, default None) - Nodata value for cropped dataset. If not specified then a - nodata value is set based on the minimum permissible value - of the Raster's data type. Note that this changes the - values of the pixels to the new nodata value, and changes - the metadata of the raster. - - kwargs : opt - Optional named arguments to pass to the format drivers. - For example can be `compress="deflate"` to add compression. - - Returns - ------- - pyspatialml.Raster - Raster with masked layers. 
- """ - # some checks - if invert is True: - crop = False - - tfile = None - - if in_memory is False: - file_path, tfile = self._tempfile(file_path) - - meta = self.meta.copy() - dtype = self._check_supported_dtype(dtype) - - if nodata is None: - nodata = get_nodata_value(dtype) - - meta["dtype"] = dtype - - masked_ndarrays = [] - - for layer in self.loc.values(): - # set pixels outside of mask to raster band's nodata value - masked_arr, transform = rasterio.mask.mask( - dataset=layer.ds, - shapes=[shapes.geometry.unary_union], - filled=False, - invert=invert, - crop=crop, - pad=pad, - ) - - if layer.ds.count > 1: - masked_arr = masked_arr[layer.bidx - 1, :, :] - - else: - masked_arr = np.ma.squeeze(masked_arr) - - masked_ndarrays.append(masked_arr) - - # stack list of 2d arrays into 3d array - masked_ndarrays = np.ma.stack(masked_ndarrays) - - # write to file - meta["transform"] = transform - meta["driver"] = driver - meta["nodata"] = nodata - meta["height"] = masked_ndarrays.shape[1] - meta["width"] = masked_ndarrays.shape[2] - meta.update(kwargs) - masked_ndarrays = masked_ndarrays.filled(fill_value=nodata) - - if in_memory is False: - with rasterio.open(file_path, "w", **meta) as dst: - dst.write(masked_ndarrays.astype(dtype)) - output_dst = file_path - else: - with MemoryFile() as memfile: - dst = memfile.open(**meta) - dst.write(masked_ndarrays.astype(dtype)) - - output_dst = [ - RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) - ] - for i in output_dst: - i.in_memory = True - - # create new Raster object with the result - new_raster = self._copy(output_dst, self.names) - - # override close method - if tfile is not None: - for layer in new_raster.iloc: - layer._close = tfile.close - - return new_raster - - def intersect( - self, - file_path=None, - in_memory=False, - driver="GTiff", - dtype=None, - nodata=None, - **kwargs - ): - """Perform a intersect operation on the Raster object. 
- - Computes the geometric intersection of the RasterLayers with - the Raster object. This will cause nodata values in any of - the rasters to be propagated through all of the output rasters. - - Parameters - ---------- - file_path : str (optional, default None) - File path to save to resulting Raster. If not supplied then - the resulting Raster is saved to a temporary file. - - in_memory : bool, default is False - Whether to initiated the Raster from an array and store the - data in-memory using Rasterio's in-memory files. - - driver : str (default 'GTiff') - Named of GDAL-supported driver for file export. - - dtype : str (optional, default None) - Coerce RasterLayers to the specified dtype. If not - specified then the new intersected Raster is created using - the dtype of the existing Raster dataset, which uses a - dtype that can accommodate the data types of all of the - individual RasterLayers. - - nodata : any number (optional, default None) - Nodata value for new dataset. If not specified then a - nodata value is set based on the minimum permissible value - of the Raster's data type. Note that this changes the - values of the pixels that represent nodata to the new - value. - - kwargs : opt - Optional named arguments to pass to the format drivers. - For example can be `compress="deflate"` to add compression. - - Returns - ------- - Raster - Raster with layers that are masked based on a union of all - masks in the suite of RasterLayers. 
- """ - tfile = None - - if in_memory is False: - file_path, tfile = self._tempfile(file_path) - - meta = self.meta.copy() - dtype = self._check_supported_dtype(dtype) - - if nodata is None: - nodata = get_nodata_value(dtype) - - arr = self.read(masked=True) - mask_2d = arr.mask.any(axis=0) - - # repeat mask for n_bands - mask_3d = np.repeat(a=mask_2d[np.newaxis, :, :], repeats=self.count, axis=0) - - intersected_arr = np.ma.masked_array(arr, mask=mask_3d, fill_value=nodata) - intersected_arr = np.ma.filled(intersected_arr, fill_value=nodata) - - meta["driver"] = driver - meta["nodata"] = nodata - meta["dtype"] = dtype - meta.update(kwargs) - - if in_memory is False: - with rasterio.open(file_path, "w", **meta) as dst: - dst.write(intersected_arr.astype(dtype)) - output_dst = file_path - else: - with MemoryFile() as memfile: - dst = memfile.open(**meta) - dst.write(intersected_arr.astype(dtype)) - - output_dst = [ - RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) - ] - for i in output_dst: - i.in_memory = True - - # create new Raster object with the result - new_raster = self._copy(output_dst, self.names) - - # override close method - if tfile is not None: - for layer in new_raster.iloc: - layer._close = tfile.close - - return new_raster - - def crop( - self, - bounds, - file_path=None, - in_memory=False, - driver="GTiff", - dtype=None, - nodata=None, - **kwargs - ): - """Crops a Raster object by the supplied bounds. - - Parameters - ---------- - bounds : tuple - A tuple containing the bounding box to clip by in the form - of (xmin, ymin, xmax, ymax). - - file_path : str (optional, default None) - File path to save to cropped raster. If not supplied then - the cropped raster is saved to a temporary file. - - in_memory : bool, default is False - Whether to initiated the Raster from an array and store - the data in-memory using Rasterio's in-memory files. - - driver : str (default 'GTiff'). 
Default is 'GTiff' - Named of GDAL-supported driver for file export. - - dtype : str (optional, default None) - Coerce RasterLayers to the specified dtype. If not - specified then the new intersected Raster is created using - the dtype of theexisting Raster dataset, which uses a - dtype that can accommodate the data types of all of the - individual RasterLayers. - - nodata : any number (optional, default None) - Nodata value for new dataset. If not specified then a - nodata value is set based on the minimum permissible value - of the Raster's data type. Note that this does not change - the pixel nodata values of the raster, it only changes - the metadata of what value represents a nodata pixel. - - kwargs : opt - Optional named arguments to pass to the format drivers. - For example can be `compress="deflate"` to add compression. - - Returns - ------- - Raster - Raster cropped to new extent. - """ - tfile = None - - if in_memory is False: - file_path, tfile = self._tempfile(file_path) - - dtype = self._check_supported_dtype(dtype) - if nodata is None: - nodata = get_nodata_value(dtype) - - # get row, col positions for bounds - xmin, ymin, xmax, ymax = bounds - rows, cols = rasterio.transform.rowcol( - transform=self.transform, xs=(xmin, xmax), ys=(ymin, ymax) - ) - - # create window covering the min/max rows and cols - window = Window( - col_off=min(cols), - row_off=min(rows), - width=max(cols) - min(cols), - height=max(rows) - min(rows), - ) - cropped_arr = self.read(masked=True, window=window) - - # calculate the new transform - new_transform = rasterio.transform.from_bounds( - west=xmin, - south=ymin, - east=xmax, - north=ymax, - width=cropped_arr.shape[2], - height=cropped_arr.shape[1], - ) - - # update the destination meta - meta = self.meta.copy() - meta.update( - transform=new_transform, - width=cropped_arr.shape[2], - height=cropped_arr.shape[1], - driver=driver, - nodata=nodata, - dtype=dtype, - ) - meta.update(kwargs) - cropped_arr = 
cropped_arr.filled(fill_value=nodata) - - if in_memory is False: - with rasterio.open(file_path, "w", **meta) as dst: - dst.write(cropped_arr.astype(dtype)) - output_dst = file_path - - else: - with MemoryFile() as memfile: - dst = memfile.open(**meta) - dst.write(cropped_arr.astype(dtype)) - - output_dst = [ - RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) - ] - for i in output_dst: - i.in_memory = True - - new_raster = self._copy(output_dst, self.names) - - if tfile is not None: - for layer in new_raster.iloc: - layer._close = tfile.close - - return new_raster - - def to_crs( - self, - crs, - resampling="nearest", - file_path=None, - in_memory=False, - driver="GTiff", - nodata=None, - n_jobs=1, - warp_mem_lim=0, - progress=False, - **kwargs - ): - """Reprojects a Raster object to a different crs. - - Parameters - ---------- - crs : rasterio.transform.CRS object, or dict - Example: CRS({'init': 'EPSG:4326'}) - - resampling : str (default 'nearest') - Resampling method to use. One of the following: - nearest, - bilinear, - cubic, - cubic_spline, - lanczos, - average, - mode, - max (GDAL >= 2.2), - min (GDAL >= 2.2), - med (GDAL >= 2.2), - q1 (GDAL >= 2.2), - q3 (GDAL >= 2.2) - - file_path : str (optional, default None) - Optional path to save reprojected Raster object. If not - specified then a tempfile is used. - - in_memory : bool, default is False - Whether to initiated the Raster from an array and store the - data in-memory using Rasterio's in-memory files. - - driver : str (default 'GTiff') - Named of GDAL-supported driver for file export. - - nodata : any number (optional, default None) - Nodata value for new dataset. If not specified then the - existing nodata value of the Raster object is used, which - can accommodate the dtypes of the individual layers in the - Raster. - - n_jobs : int (default 1) - The number of warp worker threads. - - warp_mem_lim : int (default 0) - The warp operation memory limit in MB. 
Larger values allow - the warp operation to be carried out in fewer chunks. The - amount of memory required to warp a 3-band uint8 2000 row - x 2000 col raster to a destination of the same size is - approximately 56 MB. The default (0) means 64 MB with GDAL - 2.2. - - progress : bool (default False) - Optionally show progress of transform operations. - - kwargs : opt - Optional named arguments to pass to the format drivers. - For example can be `compress="deflate"` to add compression. - - Returns - ------- - Raster - Raster following reprojection. - """ - tfile = None - - if in_memory is False: - file_path, tfile = self._tempfile(file_path) - - if nodata is None: - nodata = get_nodata_value(self.meta["dtype"]) - - resampling_methods = [i.name for i in rasterio.enums.Resampling] - if resampling not in resampling_methods: - raise ValueError( - "Resampling method must be one of {}:".format(resampling_methods) - ) - - dst_transform, dst_width, dst_height = calculate_default_transform( - src_crs=self.crs, - dst_crs=crs, - width=self.width, - height=self.height, - left=self.bounds.left, - right=self.bounds.right, - bottom=self.bounds.bottom, - top=self.bounds.top, - ) - - meta = self.meta.copy() - meta["nodata"] = nodata - meta["width"] = dst_width - meta["height"] = dst_height - meta["transform"] = dst_transform - meta["crs"] = crs - meta.update(kwargs) - - if progress is True: - t = tqdm(total=self.count) - - if in_memory is False: - with rasterio.open(file_path, "w", driver=driver, **meta) as dst: - for i, layer in enumerate(self.loc.values()): - reproject( - source=rasterio.band(layer.ds, layer.bidx), - destination=rasterio.band(dst, i + 1), - resampling=rasterio.enums.Resampling[resampling], - num_threads=n_jobs, - warp_mem_lim=warp_mem_lim, - ) - - if progress is True: - t.update() - - output_dst = file_path - - else: - with MemoryFile() as memfile: - dst = memfile.open(driver=driver, **meta) - for i, layer in enumerate(self.loc.values()): - reproject( - 
source=rasterio.band(layer.ds, layer.bidx), - destination=rasterio.band(dst, i + 1), - resampling=rasterio.enums.Resampling[resampling], - num_threads=n_jobs, - warp_mem_lim=warp_mem_lim, - ) - - if progress is True: - t.update() - - output_dst = [ - RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) - ] - - for i in output_dst: - i.in_memory = True - - new_raster = self._copy(output_dst, self.names) - - if tfile is not None: - for layer in new_raster.iloc: - layer._close = tfile.close - - return new_raster - - def aggregate( - self, - out_shape, - resampling="nearest", - file_path=None, - in_memory=False, - driver="GTiff", - dtype=None, - nodata=None, - **kwargs - ): - """Aggregates a raster to (usually) a coarser grid cell size. - - Parameters - ---------- - out_shape : tuple - New shape in (rows, cols). - - resampling : str (default 'nearest') - Resampling method to use when applying decimated reads when - out_shape is specified. Supported methods are: 'average', - 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', - 'max', 'med', 'min', 'mode', 'q1', 'q3'. - - file_path : str (optional, default None) - File path to save to cropped raster. If not supplied then - the aggregated raster is saved to a temporary file. - - in_memory : bool, default is False - Whether to initiated the Raster from an array and store - the data in-memory using Rasterio's in-memory files. - - driver : str (default 'GTiff') - Named of GDAL-supported driver for file export. - - dtype : str (optional, default None) - Coerce RasterLayers to the specified dtype. If not - specified then the new intersected Raster is created using - the dtype of the existing Raster dataset, which uses a - dtype that can accommodate the data types of all of the - individual RasterLayers. - - nodata : any number (optional, default None) - Nodata value for new dataset. If not specified then a - nodata value is set based on the minimum permissible value - of the Raster's dtype. 
Note that this does not change the - pixel nodata values of the raster, it only changes the - metadata of what value represents a nodata pixel. - - kwargs : opt - Optional named arguments to pass to the format drivers. For - example can be `compress="deflate"` to add compression. - - Returns - ------- - Raster - Raster object aggregated to a new pixel size. - """ - tfile = None - - if in_memory is False: - file_path, tfile = self._tempfile(file_path) - - rows, cols = out_shape - arr = self.read(masked=True, out_shape=out_shape, resampling=resampling) - meta = self.meta.copy() - dtype = self._check_supported_dtype(dtype) - - if nodata is None: - nodata = get_nodata_value(dtype) - - arr = arr.filled(fill_value=nodata) - - meta["driver"] = driver - meta["nodata"] = nodata - meta["height"] = rows - meta["width"] = cols - meta["dtype"] = dtype - bnd = self.bounds - meta["transform"] = rasterio.transform.from_bounds( - west=bnd.left, - south=bnd.bottom, - east=bnd.right, - north=bnd.top, - width=cols, - height=rows, - ) - meta.update(kwargs) - - if in_memory is False: - with rasterio.open(file_path, "w", **meta) as dst: - dst.write(arr.astype(dtype)) - output_dst = file_path - - else: - with MemoryFile() as memfile: - dst = memfile.open(**meta) - dst.write(arr.astype(dtype)) - - output_dst = [ - RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) - ] - for i in output_dst: - i.in_memory = True - - new_raster = self._copy(output_dst, self.names) - - if tfile is not None: - for layer in new_raster.iloc: - layer._close = tfile.close - - return new_raster - - def apply( - self, - function, - file_path=None, - in_memory=False, - driver="GTiff", - dtype=None, - nodata=None, - progress=False, - function_args={}, - **kwargs - ): - """Apply user-supplied function to a Raster object. - - Parameters - ---------- - function : function - Function that takes an numpy array as a single argument. 
- - file_path : str (optional, default None) - Optional path to save calculated Raster object. If not - specified then a tempfile is used. - - in_memory : bool, default is False - Whether to initiated the Raster from an array and store the - data in-memory using Rasterio's in-memory files. - - driver : str (default 'GTiff') - Named of GDAL-supported driver for file export. - - dtype : str (optional, default None) - Coerce RasterLayers to the specified dtype. If not - specified then the new Raster is created using the dtype of - the calculation result. - - nodata : any number (optional, default None) - Nodata value for new dataset. If not specified then a - nodata value is set based on the minimum permissible value - of the Raster's data type. Note that this changes the - values of the pixels that represent nodata pixels. - - progress : bool (default False) - Optionally show progress of transform operations. - - function_args : dict (optional) - Optionally pass arguments to the `function` as a dict or - keyword arguments. - - kwargs : opt - Optional named arguments to pass to the format drivers. - For example can be `compress="deflate"` to add compression. - - Returns - ------- - Raster - Raster containing the calculated result. 
- """ - tfile = None - - if in_memory is False: - file_path, tfile = self._tempfile(file_path) - - function = partial(function, **function_args) - - # perform test calculation determine dimensions, dtype, nodata - window = next(self.block_shapes(*self.block_shape)) - img = self.read(masked=True, window=window) - arr = function(img, **function_args) - - if arr.ndim > 2: - indexes = np.arange(1, arr.shape[0] + 1) - count = len(indexes) - else: - indexes = 1 - count = 1 - - dtype = self._check_supported_dtype(dtype) - - if nodata is None: - nodata = get_nodata_value(dtype) - - # open output file with updated metadata - meta = self.meta.copy() - meta.update(driver=driver, count=count, dtype=dtype, nodata=nodata) - meta.update(kwargs) - - # get windows - windows = [w for w in self.block_shapes(*self.block_shape)] - data_gen = (self.read(window=w, masked=True) for w in windows) - counter = tqdm(windows, total=len(windows), disable=not progress) - - if in_memory is False: - with rasterio.open(file_path, "w", **meta) as dst: - for w, res, pbar in zip(windows, map(function, data_gen), counter): - res = np.ma.filled(res, fill_value=nodata) - dst.write(res.astype(dtype), window=w, indexes=indexes) - - output_dst = file_path - - else: - with MemoryFile() as memfile: - dst = memfile.open(**meta) - for w, res, pbar in zip(windows, map(function, data_gen), counter): - res = np.ma.filled(res, fill_value=nodata) - dst.write(res.astype(dtype), window=w, indexes=indexes) - - output_dst = [ - RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) - ] - - for i in output_dst: - i.in_memory = True - - # create new raster object with result - new_raster = self._copy(output_dst) - - # override close method - if tfile is not None: - for layer in new_raster.iloc: - layer._close = tfile.close - - return new_raster - - def to_pandas(self, max_pixels=None, resampling="nearest") -> pd.DataFrame: - """Raster to pandas DataFrame. 
- - Parameters - ---------- - max_pixels: int (default None) - Maximum number of pixels to sample. By default all pixels - are used. - - resampling : str (default 'nearest') - Resampling method to use when applying decimated reads when - out_shape is specified. Supported methods are: 'average', - 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', - 'max', 'med', 'min', 'mode', 'q1', 'q3'. - - Returns - ------- - pandas.DataFrame - DataFrame containing values of names of RasterLayers in - the Raster as columns, and pixel values as rows. - """ - - # read dataset using decimated reads - if max_pixels is not None: - rel_width = self.shape[1] / max_pixels - - if rel_width > 1: - col_scaling = round(max_pixels / rel_width) - row_scaling = max_pixels - col_scaling - else: - col_scaling = round(max_pixels * rel_width) - row_scaling = max_pixels - col_scaling - else: - row_scaling, col_scaling = self.shape[0], self.shape[1] - - out_shape = (row_scaling, col_scaling) - arr = self.read(masked=True, out_shape=out_shape, resampling=resampling) - bands, rows, cols = arr.shape - nodatavals = self.nodatavals - - # x and y grid coordinate arrays - x_range = np.linspace(start=self.bounds.left, stop=self.bounds.right, num=cols) - y_range = np.linspace(start=self.bounds.top, stop=self.bounds.bottom, num=rows) - xs, ys = np.meshgrid(x_range, y_range) - - arr = arr.reshape((bands, rows * cols)) - arr = arr.transpose() - df = pd.DataFrame( - data=np.column_stack((xs.flatten(), ys.flatten(), arr)), - columns=["x", "y"] + list(self.names), - ) - - # set nodata values to nan - for i, col_name in enumerate(self.names): - df.loc[df[col_name] == nodatavals[i], col_name] = np.nan - - return df - - def sample(self, size, strata=None, return_array=False, random_state=None): - """Generates a random sample of according to size, and samples - the pixel values. - - Parameters - ---------- - size : int - Number of random samples or number of samples per strata if - a `strata` object is supplied. 
- - strata : pyspatialml Raster object (opt) - Whether to use stratified instead of random sampling. Strata - can be supplied using another pyspatialml.Raster object. - - return_array : bool (opt), default=False - Optionally return extracted data as separate X and xy - masked numpy arrays. - - random_state : int (opt) - integer to use within random.seed. - - Returns - ------- - pandas.DataFrame - DataFrame containing values of names of RasterLayers in the Raster - if `return_array` is False. - - tuple - A tuple containing two elements if `return_array` is True: - - - numpy.ndarray - Numpy array of extracted raster values, typically 2d. - - numpy.ndarray - 2D numpy array of xy coordinates of extracted values. - """ - # set the seed - np.random.seed(seed=random_state) - - if not strata: - # create np array to store randomly sampled data - valid_samples = np.zeros((0, self.count)) - valid_coordinates = np.zeros((0, 2)) - - # loop until target number of samples is satisfied - satisfied = False - - n = size - while satisfied is False: - - # generate random row and column indices - Xsample = np.random.choice(range(0, self.width), n) - Ysample = np.random.choice(range(0, self.height), n) - - # create 2d numpy array with sample locations set to 1 - sample_raster = np.empty((self.height, self.width)) - sample_raster[:] = np.nan - sample_raster[Ysample, Xsample] = 1 - - # get indices of sample locations - rows, cols = np.nonzero(np.isnan(sample_raster) == False) - - # convert row, col indices to coordinates - xy = np.transpose(rasterio.transform.xy(self.transform, rows, cols)) - - # sample at random point locations - samples = self.extract_xy_chunked(xs=xy[:, 0], ys=xy[:, 1]) - - # append only non-masked data to each row of X_random - samples = samples.astype("float32").filled(np.nan) - invalid_ind = np.isnan(samples).any(axis=1) - samples = samples[~invalid_ind, :] - valid_samples = np.append(valid_samples, samples, axis=0) - - xy = xy[~invalid_ind, :] - valid_coordinates = 
np.append(valid_coordinates, xy, axis=0) - - # check to see if target_nsamples has been reached - if len(valid_samples) >= size: - satisfied = True - else: - n = size - len(valid_samples) - - else: - if strata.count != 1: - raise AttributeError( - "Strata must be a Raster object with a single band." - ) - - # get number of unique categories - strata_arr = strata.iloc[0].read(masked=True) - categories = np.unique(strata_arr.flatten()) - categories = categories[~categories.mask] - - # store selected coordinates - selected = np.zeros((0, 2)) - - for cat in categories: - - # get row,col positions for cat strata - ind = np.transpose(np.nonzero(strata_arr == cat)) - - if size > ind.shape[0]: - msg = ( - "Sample size is greater than number of pixels in " "strata {}" - ).format(str(ind)) - - msg = os.linesep.join([msg, "Sampling using replacement"]) - Warning(msg) - - # random sample - sample = np.random.uniform(0, ind.shape[0], size).astype("int") - xy = ind[sample, :] - - selected = np.append(selected, xy, axis=0) - - # convert row, col indices to coordinates - x, y = rasterio.transform.xy( - transform=self.transform, rows=selected[:, 0], cols=selected[:, 1] - ) - valid_coordinates = np.column_stack((x, y)) - - # extract data - valid_samples = self.extract_xy_chunked( - xs=valid_coordinates[:, 0], ys=valid_coordinates[:, 1] - ) - - # return as geopandas array as default (or numpy arrays) - if return_array is False: - gdf = pd.DataFrame(valid_samples, columns=self.names) - gdf["geometry"] = list( - zip(valid_coordinates[:, 0], valid_coordinates[:, 1]) - ) - gdf["geometry"] = gdf["geometry"].apply(Point) - gdf = gpd.GeoDataFrame(gdf, geometry="geometry", crs=self.crs) - return gdf - else: - return valid_samples, valid_coordinates - - def extract_xy(self, xys, return_array=False, progress=False): - """Samples pixel values using an array of xy locations. 
- - Parameters - ---------- - xys : 2d array-like - x and y coordinates from which to sample the raster - (n_samples, xys). - - return_array : bool (opt), default=False - By default the extracted pixel values are returned as a - geopandas.GeoDataFrame. If `return_array=True` then the - extracted pixel values are returned as a tuple of - numpy.ndarrays. - - progress : bool (opt), default=False - Show a progress bar for extraction. - - Returns - ------- - geopandas.GeoDataframe - Containing extracted data as point geometries if - `return_array=False`. - - numpy.ndarray - 2d masked array containing sampled raster values (sample, - bands) at the x,y locations. - """ - # extract pixel values - dtype = np.find_common_type([np.float32], self.dtypes) - X = np.ma.zeros((xys.shape[0], self.count), dtype=dtype) - t = tqdm(self.loc.values(), total=self.count, disable=not progress) - - for i, (layer, pbar) in enumerate(zip(self.loc.values(), t)): - sampler = sample_gen( - dataset=layer.ds, xy=xys, indexes=layer.bidx, masked=True - ) - v = np.ma.asarray([i for i in sampler]) - X[:, i] = v.flatten() - - # return as geopandas array as default (or numpy arrays) - if return_array is False: - gdf = pd.DataFrame(X, columns=self.names) - gdf["geometry"] = list(zip(xys[:, 0], xys[:, 1])) - gdf["geometry"] = gdf["geometry"].apply(Point) - gdf = gpd.GeoDataFrame(gdf, geometry="geometry", crs=self.crs) - return gdf - - return X - - def extract_xy_chunked(self, xs, ys, progress=False): - rows, cols = rowcol(self.transform, xs, ys) - rowcol_idx = np.column_stack((rows, cols)) - pixel_index = np.arange(rowcol_idx.shape[0]) - - # get row, col positions that are outside of the raster - negative_idx = (rowcol_idx < 0).any(axis=1) - outside_idx = (rowcol_idx[:, 0] >= self.shape[0]) | ( - rowcol_idx[:, 1] >= self.shape[1] - ) - - outsiders = np.logical_or(negative_idx, outside_idx) - valid = np.nonzero(outsiders == False)[0] - invalid = np.nonzero(outsiders == True)[0] - - # remove row, col > 
shape - rowcol_idx = rowcol_idx[~outsiders, :] - pixel_index = pixel_index[~outsiders] - - # lookup pixel values at row, col positons by chunk - windows = [w for w in self.block_shapes(*self.block_shape)] - data_gen = (self.read(window=w, masked=True) for w in windows) - t = tqdm(windows, total=len(windows), disable=not progress) - - dtype = np.result_type(np.float32, *self.dtypes) - X = np.ma.zeros((self.count, 0), dtype=dtype) - pixel_indices = np.zeros(0, dtype="int") - - for w, data, pbar in zip(windows, data_gen, t): - res, chunk_pixels = self.extract_by_chunk(data, w, rowcol_idx, pixel_index) - X = np.ma.concatenate((X, res), axis=1) - pixel_indices = np.concatenate((pixel_indices, chunk_pixels)) - - X = X.transpose((1, 0)) - - # insert empty rows to make input dimensions match output - output_arr = np.ma.zeros((len(rows), self.count)) - output_arr[pixel_indices, :] = X - output_arr[invalid, :].mask = True - output_arr[invalid, :] = None - - return output_arr - - def extract_vector(self, gdf, progress=False): - """Sample a Raster/RasterLayer using a geopandas GeoDataframe - containing points, lines or polygon features. - - Parameters - ---------- - gdf: geopandas.GeoDataFrame - Containing either point, line or polygon geometries. - Overlapping geometries will cause the same pixels to be - sampled. - - progress : bool (opt), default=False - Show a progress bar for extraction. - - Returns - ------- - geopandas.GeoDataframe - Containing extracted data as point geometries (one point - per pixel). The resulting GeoDataFrame is indexed using - a named pandas.MultiIndex, with `pixel_idx` index - referring to the index of each pixel that was sampled, and - the `geometry_idx` index referring to the index of the each - geometry in the supplied `gdf`. This makes it possible to - keep track of how sampled pixel relates to the original - geometries, i.e. 
multiple pixels being extracted within - the area of a single polygon that can be referred to using - the `geometry_idx`. - - The extracted data can subsequently be joined with the - attribute table of the supplied `gdf` using: - - training_py = geopandas.read_file(nc.polygons) - df = self.stack.extract_vector(gdf=training_py) - df = df.dropna() - - df = df.merge( - right=training_py.loc[:, ("id", "label")], - left_on="polygon_idx", - right_on="id", - right_index=True - ) - """ - # rasterize polygon and line geometries - if all(gdf.geom_type == "Polygon") or all(gdf.geom_type == "LineString"): - - shapes = [(geom, val) for geom, val in zip(gdf.geometry, gdf.index)] - arr = np.ma.zeros((self.height, self.width)) - arr[:] = -99999 - - arr = features.rasterize( - shapes=shapes, - fill=-99999, - out=arr, - transform=self.transform, - all_touched=True, - ) - - ids = arr[np.nonzero(arr != -99999)] - ids = ids.astype("int") - rows, cols = np.nonzero(arr != -99999) - xys = rasterio.transform.xy(transform=self.transform, rows=rows, cols=cols) - xys = np.transpose(xys) - - elif all(gdf.geom_type == "Point"): - ids = gdf.index.values - xys = gdf.bounds.iloc[:, 2:].values - - # extract raster pixels - X = self.extract_xy_chunked(xs=xys[:, 0], ys=xys[:, 1], progress=progress) - - # return as geopandas array as default (or numpy arrays) - X = pd.DataFrame( - data=X, columns=list(self.names), index=[pd.RangeIndex(0, X.shape[0]), ids] - ) - X.index.set_names(["pixel_idx", "geometry_idx"], inplace=True) - X["geometry"] = list(zip(xys[:, 0], xys[:, 1])) - X["geometry"] = X["geometry"].apply(Point) - X = gpd.GeoDataFrame(X, geometry="geometry", crs=self.crs) - - return X - - def extract_raster(self, src, progress=False): - """Sample a Raster object by an aligned raster of labelled pixels. - - Parameters - ---------- - src: rasterio DatasetReader - Single band raster containing labelled pixels as an open - rasterio DatasetReader object. 
- - progress : bool (opt), default=False - Show a progress bar for extraction. - - Returns - ------- - geopandas.GeoDataFrame - Geodataframe containing extracted data as point features if - `return_array=False` - """ - # open response raster and get labelled pixel indices and values - arr = src.read(1, masked=True) - rows, cols = np.nonzero(~arr.mask) - xys = np.transpose(rasterio.transform.xy(src.transform, rows, cols)) - ys = arr.data[rows, cols] - - # extract Raster object values at row, col indices - X = self.extract_xy_chunked(xs=xys[:, 0], ys=xys[:, 1], progress=progress) - - # summarize data - column_names = ["value"] + list(self.names) - gdf = pd.DataFrame(data=np.ma.column_stack((ys, X)), columns=column_names) - gdf["geometry"] = list(zip(xys[:, 0], xys[:, 1])) - gdf["geometry"] = gdf["geometry"].apply(Point) - gdf = gpd.GeoDataFrame(gdf, geometry="geometry", crs=self.crs) - - return gdf - - @staticmethod - def extract_by_chunk(arr, w, idx, pixel_idx): - d = idx.copy() - pixel_idx = pixel_idx.copy() - - # subtract chunk offset from row, col positions - d[:, 0] = d[:, 0] - w.row_off - d[:, 1] = d[:, 1] - w.col_off - - # remove negative row, col positions - pos = (d >= 0).all(axis=1) - d = d[pos, :] - pixel_idx = pixel_idx[pos] - - # remove row, col > shape - within_range = (d[:, 0] < arr.shape[1]) & (d[:, 1] < arr.shape[2]) - d = d[within_range, :] - pixel_idx = pixel_idx[within_range] - - extracted_data = arr[:, d[:, 0], d[:, 1]] - return (extracted_data, pixel_idx) - - def scale( - self, - centre=True, - scale=True, - file_path=None, - in_memory=False, - driver="GTiff", - dtype=None, - nodata=None, - progress=False, - ): - """Standardize (centre and scale) a Raster object by - subtracting the mean and dividing by the standard deviation for - each layer in the object. - - The mean and standard deviation statistics are calculated - for each layer separately. 
- - Parameters - ---------- - centre : bool, default is True - Whether to subtract the mean from each layer. - - scale : bool, default is True - Whether to divide each layer by the standard deviation of - the layer. - - file_path : str (optional, default None) - Path to a GeoTiff raster for the prediction results. If - not specified then the output is written to a temporary - file. - - in_memory : bool, default is False - Whether to initiated the Raster from an array and store the - data in-memory using Rasterio's in-memory files. - - driver : str (default 'GTiff') - Named of GDAL-supported driver for file export. - - dtype : str (optional, default None) - Optionally specify a GDAL compatible data type when saving - to file. If not specified, a data type is set based on the - data type of the prediction. - - nodata : any number (optional, default None) - Nodata value for file export. If not specified then the - nodata value is derived from the minimum permissible value - for the given data type. - - progress : bool (default False) - Show progress bar for operation. - - Returns - ------- - Pyspatialml.Raster object with rescaled data. - """ - - def scaler(x, means, sds): - for i, m, z in zip(range(x.shape[0]), means, sds): - x[i, :, :] = (x[i, :, :] - m) / z - return x - - if centre is True: - means = self.mean() - else: - means = np.repeat(0, self.count) - - if scale is True: - sds = self.stddev() - else: - sds = np.repeat(1, self.count) - - res = self.apply( - scaler, - file_path=file_path, - in_memory=in_memory, - driver=driver, - dtype=dtype, - nodata=nodata, - progress=progress, - function_args=dict(means=means, sds=sds), - ) - - return res - - def alter( - self, - transformer, - file_path=None, - in_memory=False, - driver="GTiff", - dtype=None, - nodata=None, - progress=False, - ): - """Apply a fitted scikit-learn transformer to a Raster object. - - Can be used to transform a raster using methods such as StandardScaler, - RobustScaler etc. 
- - Parameters - ---------- - transformer : a sklearn.preprocessing.Transformer object - - file_path : str (optional, default None) - Path to a GeoTiff raster for the prediction results. If - not specified then the output is written to a temporary - file. - - in_memory : bool, default is False - Whether to initiated the Raster from an array and store the - data in-memory using Rasterio's in-memory files. - - driver : str (default 'GTiff') - Named of GDAL-supported driver for file export. - - dtype : str (optional, default None) - Optionally specify a GDAL compatible data type when saving - to file. If not specified, a data type is set based on the - data type of the prediction. - - nodata : any number (optional, default None) - Nodata value for file export. If not specified then the - nodata value is derived from the minimum permissible value - for the given data type. - - progress : bool (default False) - Show progress bar for operation. - - Returns - ------- - Pyspatialml.Raster object with transformed data. 
- """ - res = self.apply( - self._apply_transformer, - file_path=file_path, - in_memory=in_memory, - driver=driver, - dtype=dtype, - nodata=nodata, - progress=progress, - function_args={"transformer": transformer}, - ) - - return res - - -class TempRasterLayer: - """Create a NamedTemporaryFile like object on Windows that has a - close method - - Workaround used on Windows which cannot open the file a second time - """ - - def __init__(self, tempdir=tempfile.tempdir): - self.tfile = tempfile.NamedTemporaryFile(dir=tempdir, suffix=".tif").name - self.name = self.tfile - - def close(self): - os.unlink(self.tfile) +import os +import tempfile +from collections import namedtuple +from collections.abc import ValuesView +from functools import partial +from typing import Tuple +import affine + +import geopandas as gpd +import numpy as np +import pandas as pd +import rasterio +import rasterio.mask +import rasterio.plot +from rasterio import features +from rasterio.io import MemoryFile +from rasterio.sample import sample_gen +from rasterio.warp import calculate_default_transform, reproject +from rasterio.windows import Window +from rasterio.transform import rowcol +from shapely.geometry import Point +from tqdm import tqdm +from collections import Counter + +from ._plotting import RasterPlotMixin +from ._prediction import ( + predict_multioutput, + predict_output, + predict_prob, + stack_constants, +) +from ._utils import get_nodata_value +from .rasterlayer import RasterLayer +from ._rasterstats import RasterStatsMixin +from .locindexer import _LocIndexer + + +class Raster(_LocIndexer, RasterStatsMixin, RasterPlotMixin): + """Creates a collection of file-based GDAL-supported raster + datasets that share a common coordinate reference system and + geometry. 
+ + Raster objects encapsulate RasterLayer objects, which represent + single band raster datasets that can physically be represented by + either separate single-band raster files, multi-band raster files, + or any combination of individual bands from multi-band raster and + single-band raster datasets. + + Attributes + ---------- + files : list + A list of the raster dataset files that are used in the Raster. + This does not have to be the same length as the number of + RasterLayers because some files may have multiple bands. + + meta : dict + A dict containing the raster metadata. The dict contains the + following keys/values: + + crs : the crs object + transform : the Affine.affine transform object + width : width of the Raster in pixels + height : height of the Raster in pixels + count : number of RasterLayers within the Raster + dtype : the numpy datatype that represents lowest common + denominator of the different dtypes for all of the layers + in the Raster. + + names : list + A list of the RasterLayer names. + + block_shape : tuple + The default block_shape in (rows, cols) for reading windows of data + in the Raster for out-of-memory processing. + """ + + def __init__( + self, + src, + crs=None, + transform=None, + nodata=None, + file_path=None, + driver=None, + tempdir=tempfile.tempdir, + in_memory=False, + ): + """Initiate a new Raster object + + Parameters + ---------- + src : file path, RasterLayer, rasterio dataset, or a ndarray + Initiate a Raster object from any combination of a file + path or list of file paths to GDAL-supported raster + datasets, RasterLayer objects, or directly from a rasterio + dataset or band object that is opened in 'r' or 'rw' mode. + + A Raster object can also be created directly from a numpy + array in [band, rows, cols] order. The additional arguments + `crs` and `transform` should also be provided to supply + spatial coordinate information. 
+ + crs : rasterio.crs.CRS object (optional, default is None) + CRS object containing projection information for data if + provided as an array. + + transform : affine.Affine object (optional, default is None) + Affine object containing transform information for data if + provided as an array. + + nodata : any number (optional, default is None) + Assign a nodata value to the Raster dataset when `src` is + a ndarray. If a nodata value is not specified then it is + determined based on the minimum permissible value for the + array's data type. + + file_path : str (optional, default None) + Path to save new Raster object if created from an array. + + driver : str (optional, default=None) + A GDAL compatible driver to use when initiating a raster + from a numpy array. + + tempdir : str, default is tempfile.tempdir + Path to a directory to store temporary files that are + produced during geoprocessing operations. + + in_memory : bool, default is False + Whether to initiate the Raster from an array and store the + data in-memory using Rasterio's in-memory files. + + Returns + ------- + pyspatialml.Raster + Raster object containing the src layers stacked into a + single object. 
+ """ + self.files = list() + self.meta = None + self._block_shape = (256, 256) + self.tempdir = tempdir + self._internal = frozenset( + ["_internal", "files", "meta", "_block_shape", "tempdir"] + ) + + src_layers = [] + + # get temporary file name if file_path is None + if file_path is None and isinstance(src, np.ndarray): + file_path, tfile = self._tempfile(file_path) + driver = "GTiff" + + # initiate from numpy array + if isinstance(src, np.ndarray): + if src.ndim == 2: + src = src[np.newaxis] + count, height, width = src.shape + + if in_memory is True: + memfile = MemoryFile() + dst = memfile.open( + height=height, + width=width, + count=count, + driver=driver, + dtype=src.dtype, + crs=crs, + transform=transform, + nodata=nodata, + ) + dst.write(src) + else: + with rasterio.open( + file_path, + mode="w", + driver=driver, + height=height, + width=width, + count=count, + dtype=src.dtype, + crs=crs, + transform=transform, + nodata=nodata, + ) as dst: + dst.write(src) + dst = rasterio.open(file_path, "r") + + for i in range(dst.count): + band = rasterio.band(dst, i + 1) + rasterlayer = RasterLayer(band) + if in_memory is True: + rasterlayer.in_memory = True + src_layers.append(rasterlayer) + + if tfile is not None and in_memory is False: + for layer in src_layers: + layer._close = tfile.close + self._layers = src_layers + return + + # from a single file path + elif isinstance(src, str): + src_layers = [] + r = rasterio.open(src, mode="r", driver=driver) + for i in range(r.count): + band = rasterio.band(r, i + 1) + src_layers.append(RasterLayer(band)) + self._layers = src_layers + return + + # from a single RasterLayer + elif isinstance(src, RasterLayer): + self._layers = src + self._rename_inplace(list(self.names)[0], src.name) + return + + # from a single Raster + elif isinstance(src, Raster): + self._layers = [i for i in src.values()] + for old, new in zip(self.names, list(src.names)): + self._rename_inplace(old, new) + return + + # from a single 
rasterio.io.datasetreader/writer + elif isinstance(src, rasterio.io.DatasetReader): + src_layers = [] + for i in range(src.count): + band = rasterio.band(src, i + 1) + src_layers.append(RasterLayer(band)) + self._layers = src_layers + return + + # from a single rasterio.band object + elif isinstance(src, rasterio.Band): + self._layers = RasterLayer(src) + return + + # from a list of objects + elif isinstance(src, list): + # list of file paths (str) + if all(isinstance(x, str) for x in src): + src_layers = [] + for f in src: + r = rasterio.open(f, mode="r", driver=driver) + for i in range(r.count): + band = rasterio.band(r, i + 1) + src_layers.append(RasterLayer(band)) + + self._layers = src_layers + return + + # list of RasterLayer objects + elif all(isinstance(x, RasterLayer) for x in src): + self._layers = src + for old, new in zip(self.names, src): + self._rename_inplace(old, new.name) + return + + # list of rasterio.io.datasetreader objects + elif all(isinstance(x, rasterio.io.DatasetReader) for x in src): + src_layers = [] + for r in src: + for i in range(r.count): + band = rasterio.band(r, i + 1) + src_layers.append(RasterLayer(band)) + self._layers = src_layers + return + + # from a list of rasterio.band objects + elif all(isinstance(x, rasterio.Band) for x in src): + src_layers = [] + for band in src: + src_layers.append(RasterLayer(band)) + self._layers = src_layers + return + else: + raise ValueError("Cannot create a Raster object from a mixture of inputs") + + @property + def block_shape(self) -> Tuple[int, int]: + """Return the block shape in (height, width) used to read windows from the + Raster + """ + return self._block_shape + + @block_shape.setter + def block_shape(self, value) -> None: + if not isinstance(value, tuple): + raise ValueError( + "block_shape must be set using an integer tuple as (rows, " "cols)" + ) + rows, cols = value + + if not isinstance(rows, int) or not isinstance(cols, int): + raise ValueError( + "tuple must consist of integer 
values referring to number of " + "rows, cols" + ) + self._block_shape = (rows, cols) + + def set_block_shape(self, value) -> None: + """Set the block shape of the raster, i.e. the height and width + of windows to read in chunks for the predict, predict_proba, + apply, and other supported-methods. + + Note block shape can also be set with `myraster.block_shape = (500, 500)` + + Parameters + ---------- + value : tuple + A tuple of (height, width) for the block window + """ + self.block_shape = value + + @property + def count(self) -> int: + """Return the number of layers in the Raster""" + return len(self.loc) + + @property + def crs(self) -> rasterio.crs.CRS: + """Return to crs of the Raster""" + return self.meta["crs"] + + @crs.setter + def crs(self, value) -> None: + self.meta["crs"] = value + + @property + def transform(self) -> affine.Affine: + """Return the transform of the Raster""" + return self.meta["transform"] + + @transform.setter + def transform(self, value) -> None: + self.meta["transform"] = value + + @property + def width(self) -> int: + """Return the width (number of columns) in the Raster""" + return self.meta["width"] + + @property + def height(self) -> int: + """Return the height (number of rows) in the Raster""" + return self.meta["height"] + + @property + def shape(self) -> Tuple[int, int]: + """Return the shape (height, width) of the Raster""" + return self.height, self.width + + @property + def res(self) -> Tuple[float, float]: + """Return a tuple of the resolution of the Raster in (width, height)""" + return abs(self.meta["transform"].a), abs(self.meta["transform"].e) + + @property + def bounds(self) -> namedtuple: + """Return the bounding box of the raster in (left, bottom, right, top)""" + bounds = rasterio.transform.array_bounds( + self.height, self.width, self.transform + ) + BoundingBox = namedtuple("BoundingBox", ["left", "bottom", "right", "top"]) + return BoundingBox(bounds[0], bounds[1], bounds[2], bounds[3]) + + @property + def 
dtypes(self) -> list: + """Return the dtype of each layer in the Raster as a list""" + dtypes = list() + + for layer in self.loc.values(): + dtypes.append(layer.dtype) + + return dtypes + + @property + def nodatavals(self) -> list: + """Return the nodata value of each layer in the Raster as a list""" + nodatavals = list() + + for layer in self.loc.values(): + try: + nodatavals.append(layer.nodata) + except: + nodatavals.append(None) + + return nodatavals + + @property + def _layers(self) -> dict: + return self.loc + + @_layers.setter + def _layers(self, layers) -> None: + """Assign RasterLayer objects to the Raster + + The function assigns the layers to the loc indexer, updates + the `files` attribute and assigns syntactically-correct names + to each layer. + + Parameters + ---------- + layers : list + A list of pyspatialml.RasterLayer objects + """ + if isinstance(layers, RasterLayer): + layers = [layers] + + if all(isinstance(x, type(layers[0])) for x in layers) is False: + raise ValueError("Cannot create a Raster object from a mixture of inputs") + + meta = self._check_alignment(layers) + + if meta is False: + raise ValueError( + "Raster datasets do not have the same dimensions/transform" + ) + + # reset locindexer + self.files = list() + for key in self.loc.keys(): + self.loc.pop(key) + + # update global Raster object attributes with new values + names = [i.name for i in layers] + names = self._fix_names(names) + + # update attributes per dataset + for layer, name in zip(layers, names): + self.files.append(layer.file) + layer.name = name + self.loc[name] = layer + + self.meta = dict( + crs=meta["crs"], + transform=meta["transform"], + width=meta["width"], + height=meta["height"], + count=self.count, + dtype=np.result_type(*self.dtypes), + ) + + @staticmethod + def _fix_names(combined_names): + """Adjusts the names of pyspatialml.RasterLayer objects within the + Raster when appending new layers. 
+ + This avoids the Raster object containing duplicated names in the + case that multiple RasterLayers are appended with the same name. + + In the case of duplicated names, the RasterLayer names are appended + with a `_n` with n = 1, 2, 3 .. n. + + Parameters + ---------- + combined_names : list + List of str representing names of RasterLayers. Any duplicates + will have a suffix appended to them. + + Returns + ------- + list + List with adjusted names + """ + counts = Counter(combined_names) + + for s, num in counts.items(): + if num > 1: + for suffix in range(1, num + 1): + if s + "_" + str(suffix) not in combined_names: + combined_names[combined_names.index(s)] = s + "_" + str(suffix) + else: + i = 1 + while s + "_" + str(i) in combined_names: + i += 1 + combined_names[combined_names.index(s)] = s + "_" + str(i) + + return combined_names + + @staticmethod + def _check_alignment(layers): + """Check that a list of raster datasets are aligned with the same + pixel dimensions and geotransforms. + + Parameters + ---------- + layers : list + List of pyspatialml.RasterLayer objects. + + Returns + ------- + dict or False + Dict of metadata if all layers are spatially aligned, otherwise + returns False. + """ + + src_meta = [] + for layer in layers: + src_meta.append(layer.ds.meta.copy()) + + if not all(i["crs"] == src_meta[0]["crs"] for i in src_meta): + Warning("crs of all rasters does not match, possible unintended consequences") + + if not all( + [ + i["height"] == src_meta[0]["height"] + or i["width"] == src_meta[0]["width"] + or i["transform"] == src_meta[0]["transform"] + for i in src_meta + ] + ): + return False + + else: + return src_meta[0] + + def _check_supported_dtype(self, dtype=None) -> str: + """Method to check that a dtype is compatible with GDAL or + generate a compatible dtype from an array + + Parameters + ---------- + dtype : str, dtype, ndarray or None + Pass a dtype (as a string or dtype) to check compatibility. 
+ Pass an array to generate a compatible dtype from the + array. Pass None to use the existing dtype of the parent + Raster object. + + Returns + ------- + dtype : dtype + GDAL compatible dtype + """ + if dtype is None: + dtype = self.meta["dtype"] + + elif isinstance(dtype, np.ndarray): + dtype = rasterio.dtypes.get_minimum_dtype(dtype) + + else: + if rasterio.dtypes.check_dtype(dtype) is False: + raise AttributeError( + "{dtype} is not a support GDAL dtype".format(dtype=dtype) + ) + + return dtype + + def _tempfile(self, file_path) -> Tuple[str, str]: + """Returns a TemporaryFileWrapper and file path if a file_path + parameter is None + """ + if file_path is None: + if os.name != "nt": + tfile = tempfile.NamedTemporaryFile(dir=self.tempdir, suffix=".tif") + file_path = tfile.name + else: + tfile = TempRasterLayer() + file_path = tfile.name + + else: + tfile = None + + return file_path, tfile + + def _copy(self, src, names=None): + """Return a new Raster object from a list of files but + retaining the attributes of the parent Raster. + + Designed to be used internally to copy a Raster object. + + Parameters + ---------- + src : List of RasterLayers or file paths + List of RasterLayers or file paths used create the new + Raster object. + + names : list (optional, default None) + List to name the RasterLayer objects in the stack. If not + supplied then the names will be generated from the file + names. 
+ + Returns + ------- + pyspatialml.Raster + """ + if not isinstance(src, (list, ValuesView)): + src = [src] + + raster = Raster(src) + + # rename and copy attributes + if names is not None: + for (old, new) in zip(raster.names, names): + raster._rename_inplace(old, new) + + for old_layer, new_layer in zip(self.loc.values(), list(raster.loc.values())): + new_layer.cmap = old_layer.cmap + new_layer.norm = old_layer.norm + new_layer.categorical = old_layer.categorical + + raster.block_shape = self.block_shape + + return raster + + @staticmethod + def _apply_transformer(img, transformer): + img = np.ma.masked_invalid(img) + mask = img.mask.copy() + + # reshape into 2D array + n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] + flat_pixels = img.reshape((rows * cols, n_features)) + flat_pixels = flat_pixels.filled(0) + + # predict and replace mask + result = transformer.transform(flat_pixels) + + # reshape the prediction from a 1D into 3D array [band, row, col] + result = result.reshape((n_features, rows, cols)) + result = np.ma.masked_array(data=result, mask=mask, copy=True) + + return result + + def head(self) -> np.ndarray: + """Return the first 10 rows from the Raster as a ndarray""" + window = Window(col_off=0, row_off=0, width=20, height=10) + return self.read(window=window) + + def tail(self) -> np.ndarray: + """Return the last 10 rows from the Raster as a ndarray""" + window = Window( + col_off=self.width - 20, row_off=self.height - 10, width=20, height=10 + ) + return self.read(window=window) + + def close(self) -> None: + """Close all of the RasterLayer objects in the Raster. + + Note that this will cause any rasters based on temporary files + to be removed. This is intended as a method of clearing + temporary files that may have accumulated during an analysis + session. 
+ """ + for layer in self.loc.values(): + layer.close() + + def copy(self, subset=None): + """Creates a shallow copy of a Raster object + + Note that shallow in the context of a Raster object means that + an immutable copy of the object is made, however the on-disk and + in-memory file locations remain the same. + + Parameters + ---------- + subset : opt + A list of layer names to subset while copying. + + Returns + ------- + Raster + """ + if subset is not None: + if isinstance(subset, str): + subset = [subset] + layers = list(self.loc[subset].values()) + else: + layers = list(self.loc.values()) + + return self._copy(layers) + + def block_shapes(self, rows, cols): + """Generator for windows for optimal reading and writing based + on the raster format Windows and returns as a tuple with xoff, + yoff, width, height. + + Parameters + ---------- + rows : int + Height of window in rows. + + cols : int + Width of window in columns. + """ + for i, col in enumerate(range(0, self.width, cols)): + if col + cols < self.width: + num_cols = cols + else: + num_cols = self.width - col + + for j, row in enumerate(range(0, self.height, rows)): + if row + rows < self.height: + num_rows = rows + else: + num_rows = self.height - row + + yield Window(col, row, num_cols, num_rows) + + def read( + self, + masked=False, + window=None, + out_shape=None, + resampling="nearest", + as_df=False, + **kwargs + ) -> np.ndarray: + """Reads data from the Raster object into a numpy array. + + Parameters + ---------- + masked : bool (default False) + Read data into a masked array. + + window : rasterio.window.Window object (optional, default None) + Tuple of col_off, row_off, width, height of a window of + data to read a chunk of data into a ndarray. + + out_shape : tuple (optional, default None) + Shape of shape of array (rows, cols) to read data into + using decimated reads. 
+ + resampling : str (default 'nearest') + Resampling method to use when applying decimated reads when + out_shape is specified. Supported methods are: 'average', + 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', + 'max', 'med', 'min', 'mode', 'q1', 'q3'. + + as_df : bool (default False) + Whether to return the data as a pandas.DataFrame with + columns named by the RasterLayer names. + + **kwargs : dict + Other arguments to pass to rasterio.DatasetReader.read method + + Returns + ------- + ndarray + Raster values in 3d ndarray with the dimensions in order + of (band, row, and column). + """ + dtype = self.meta["dtype"] + + # get window to read from window or height/width of dataset + if window is None: + width = self.width + height = self.height + else: + width = window.width + height = window.height + + # decimated reads using nearest neighbor resampling + if out_shape: + height, width = out_shape + + # read bands separately into numpy array + if masked is True: + arr = np.ma.zeros((self.count, height, width), dtype=dtype) + else: + arr = np.zeros((self.count, height, width), dtype=dtype) + + for i, layer in enumerate(self.loc.values()): + arr[i, :, :] = layer.read( + masked=masked, + window=window, + out_shape=out_shape, + resampling=resampling, + **kwargs + ) + + if masked is True: + arr[i, :, :] = np.ma.MaskedArray( + data=arr[i, :, :], mask=np.isfinite(arr[i, :, :]).mask + ) + + if as_df is True: + # reshape to rows, cols, bands + arr = arr.transpose(1, 2, 0) + arr_flat = arr.reshape((arr.shape[0] * arr.shape[1], arr.shape[2])) + df = pd.DataFrame(data=arr_flat, columns=self.names) + return df + + return arr + + def write( + self, file_path, driver="GTiff", dtype=None, nodata=None, **kwargs + ): + """Write the Raster object to a file. + + Overrides the write RasterBase class method, which is a partial + function of the rasterio.DatasetReader.write method. + + Parameters + ---------- + file_path : str + File path used to save the Raster object. 
+ + driver : str (default is 'GTiff'). + Name of GDAL driver used to save Raster data. + + dtype : str (opt, default None) + Optionally specify a numpy compatible data type when + saving to file. If not specified, a data type is selected + based on the data types of RasterLayers in the Raster + object. + + nodata : any number (opt, default None) + Optionally assign a new nodata value when saving to file. + If not specified a nodata value based on the minimum + permissible value for the data types of RasterLayers in the + Raster object is used. Note that this does not change the + pixel nodata values of the raster, it only changes the + metadata of what value represents a nodata pixel. + + kwargs : opt + Optional named arguments to pass to the format drivers. + For example can be `compress="deflate"` to add compression. + + Returns + ------- + Raster + New Raster object from saved file. + """ + dtype = self._check_supported_dtype(dtype) + + if nodata is None: + nodata = get_nodata_value(dtype) + + meta = self.meta.copy() + meta["driver"] = driver + meta["nodata"] = nodata + meta["dtype"] = dtype + meta.update(kwargs) + + with rasterio.open(file_path, mode="w", **meta) as dst: + + for i, layer in enumerate(self.loc.values()): + arr = layer.read() + arr[arr == layer.nodata] = nodata + dst.write(arr.astype(dtype), i + 1) + + return self._copy(file_path, self.names) + + def predict_proba( + self, + estimator, + file_path=None, + in_memory=False, + indexes=None, + driver="GTiff", + dtype=None, + nodata=None, + constants=None, + progress=False, + **kwargs + ): + """Apply class probability prediction of a scikit learn model to a Raster. + + Parameters + ---------- + estimator : estimator object implementing 'fit' + The object to use to fit the data. + + file_path : str (optional, default None) + Path to a GeoTiff raster for the prediction results. If not + specified then the output is written to a temporary file. 
+ + in_memory : bool, default is False + Whether to initiated the Raster from an array and store the + data in-memory using Rasterio's in-memory files. + + indexes : list of integers (optional, default None) + List of class indices to export. In some circumstances, + only a subset of the class probability estimations are + desired, for instance when performing a binary + classification only the probabilities for the positive + class may be desired. + + driver : str (default 'GTiff') + Named of GDAL-supported driver for file export. + + dtype : str (optional, default None) + Optionally specify a GDAL compatible data type when saving + to file. If not specified, a data type is set based on the + data type of the prediction. + + nodata : any number (optional, default None) + Nodata value for file export. If not specified then the + nodata value is derived from the minimum permissible value + for the given data type. + + progress : bool (default False) + Show progress bar for prediction. + + constants: list-like object or a dict (optional, default None) + Constant features to add to the Raster object with each value + in a list or 1d ndarray representing an additional feature. + + If a list-like object of values os passed, then each numeric + value will be appended as constant features to the last + columns in the data. It is therefore important that all + features including constant features are present in the same + order as what was used to train the model. + + If a dict is passed, then the keys of the dict must refer to + the names of raster layers in the Raster object. In this case, + the values of the dict will replace the values of the raster + layers in the Raster object. + + kwargs : opt + Optional named arguments to pass to the format drivers. + For example can be `compress="deflate"` to add compression. + + Returns + ------- + Raster + Raster containing predicted class probabilities. Each + predicted class is represented by a RasterLayer object. 
+ The RasterLayers are named `prob_n` for 1,2,3..n, with `n` + based on the index position of the classes, not the number + of the class itself. + + For example, a classification model predicting classes with + integer values of 1, 3, and 5 would result in three + RasterLayers named 'prob_1', 'prob_2' and 'prob_3'. + """ + # some checks + tfile = None + + if in_memory is False: + file_path, tfile = self._tempfile(file_path) + + # n_jobs = get_num_workers(n_jobs) + probfun = partial( + predict_prob, + estimator=estimator, + constants=constants, + names=list(self.names), + ) + + # perform test prediction + window = Window(0, 0, 1, 1) + img = self.read(masked=True, window=window) + img = np.ma.masked_invalid(img) + n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] + n_samples = rows * cols + flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) + flat_pixels = flat_pixels.filled(0) + + if constants is not None: + flat_pixels = stack_constants(flat_pixels, constants, list(self.names)) + + result = estimator.predict_proba(flat_pixels) + + if isinstance(indexes, int): + indexes = range(indexes, indexes + 1) + + elif indexes is None: + indexes = np.arange(0, result.shape[1]) + + # check dtype and nodata + if dtype is None: + dtype = self._check_supported_dtype(result) + else: + dtype = self._check_supported_dtype(dtype) + + if nodata is None: + nodata = get_nodata_value(dtype) + + # open output file with updated metadata + meta = self.meta.copy() + count = len(indexes) + meta.update(driver=driver, count=count, dtype=dtype, nodata=nodata) + meta.update(kwargs) + + # get windows + windows = [w for w in self.block_shapes(*self.block_shape)] + data_gen = ((w, self.read(window=w, masked=True)) for w in windows) + counter = tqdm(windows, disable=not progress, total=len(windows)) + + # apply prediction function + if in_memory is False: + with rasterio.open(file_path, "w", **meta) as dst: + for w, res, pbar in zip(windows, map(probfun, data_gen), 
counter): + res = np.ma.filled(res, fill_value=nodata) + dst.write(res[indexes, :, :].astype(dtype), window=w) + + output_dst = file_path + + else: + with MemoryFile() as memfile: + dst = memfile.open( + height=meta["height"], + width=meta["width"], + count=meta["count"], + dtype=meta["dtype"], + crs=meta["crs"], + transform=meta["transform"], + nodata=meta["nodata"], + driver=driver, + ) + + for w, res, pbar in zip(windows, map(probfun, data_gen), counter): + res = np.ma.filled(res, fill_value=nodata) + dst.write(res[indexes, :, :].astype(dtype), window=w) + + output_dst = [ + RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) + ] + + for i in output_dst: + i.in_memory = True + + # create new Raster object with the result + prefix = "prob_" + names = [prefix + str(i) for i in range(len(indexes))] + new_raster = self._copy(output_dst, names) + + # override close method + if tfile is not None: + for layer in new_raster.iloc: + layer._close = tfile.close + + return new_raster + + def predict( + self, + estimator, + file_path=None, + in_memory=False, + driver="GTiff", + dtype=None, + nodata=None, + progress=False, + constants=None, + **kwargs + ): + """Apply prediction of a scikit learn model to a Raster. + + The model can represent any scikit learn model or compatible + api with a `fit` and `predict` method. These can consist of + classification or regression models. Multi-class + classifications and multi-target regressions are also + supported. + + Parameters + ---------- + estimator : estimator object implementing 'fit' + The object to use to fit the data. + + file_path : str (optional, default None) + Path to a GeoTiff raster for the prediction results. If + not specified then the output is written to a temporary + file. + + in_memory : bool, default is False + Whether to initiated the Raster from an array and store + the data in-memory using Rasterio's in-memory files. 
+ + driver : str (default 'GTiff') + Named of GDAL-supported driver for file export + + dtype : str (optional, default None) + Optionally specify a GDAL compatible data type when saving + to file. If not specified, np.float32 is assumed. + + nodata : any number (optional, default None) + Nodata value for file export. If not specified then the + nodata value is derived from the minimum permissible value + for the given data type. + + progress : bool (default False) + Show progress bar for prediction. + + constants: list-like object or a dict (optional, default None) + Constant features to add to the Raster object with each value + in a list or 1d ndarray representing an additional feature. + + If a list-like object of values os passed, then each numeric + value will be appended as constant features to the last + columns in the data. It is therefore important that all + features including constant features are present in the same + order as what was used to train the model. + + If a dict is passed, then the keys of the dict must refer to + the names of raster layers in the Raster object. In this case, + the values of the dict will replace the values of the raster + layers in the Raster object. + + kwargs : opt + Optional named arguments to pass to the format drivers. + For example can be `compress="deflate"` to add compression. + + Returns + ------- + Raster + Raster object containing prediction results as a + RasterLayers. For classification and regression models, the + Raster will contain a single RasterLayer, unless the model + is multi-class or multi-target. Layers are named + automatically as `pred_raw_n` with n = 1, 2, 3 ..n. 
+ """ + tfile = None + + if in_memory is False: + file_path, tfile = self._tempfile(file_path) + + # n_jobs = get_num_workers(n_jobs) + + # determine output count for multi-class or multi-target cases + window = Window(0, 0, 1, 1) + img = self.read(masked=True, window=window) + img = np.ma.masked_invalid(img) + n_features, rows, cols = img.shape[0], img.shape[1], img.shape[2] + n_samples = rows * cols + flat_pixels = img.transpose(1, 2, 0).reshape((n_samples, n_features)) + flat_pixels = flat_pixels.filled(0) + + if constants is not None: + flat_pixels = stack_constants(flat_pixels, constants, list(self.names)) + + result = estimator.predict(flat_pixels) + + if result.ndim > 1: + n_outputs = result.shape[result.ndim - 1] + else: + n_outputs = 1 + + indexes = np.arange(0, n_outputs) + + # chose prediction function + if len(indexes) == 1: + if constants is not None: + predfun = partial( + predict_output, + estimator=estimator, + constants=constants, + names=list(self.names), + ) + else: + predfun = partial( + predict_output, + estimator=estimator, + constants=constants, + names=list(self.names), + ) + else: + predfun = partial( + predict_multioutput, + estimator=estimator, + constants=constants, + names=list(self.names), + ) + + # check dtype and nodata + if dtype is None: + dtype = self._check_supported_dtype(result) + else: + dtype = self._check_supported_dtype(dtype) + + if nodata is None: + nodata = get_nodata_value(dtype) + + # open output file with updated metadata + meta = self.meta.copy() + count = len(indexes) + meta.update(driver=driver, count=count, dtype=dtype, nodata=nodata) + meta.update(kwargs) + + # get windows + windows = [w for w in self.block_shapes(*self.block_shape)] + data_gen = ((w, self.read(window=w, masked=True)) for w in windows) + counter = tqdm(windows, disable=not progress, total=len(windows)) + + if in_memory is False: + with rasterio.open(file_path, "w", **meta) as dst: + for w, res, pbar in zip(windows, map(predfun, data_gen), 
counter): + res = np.ma.filled(res, fill_value=nodata) + dst.write(res[indexes, :, :].astype(dtype), window=w) + + output_dst = file_path + + else: + with MemoryFile() as memfile: + dst = memfile.open( + height=meta["height"], + width=meta["width"], + count=meta["count"], + dtype=meta["dtype"], + crs=meta["crs"], + driver=driver, + transform=meta["transform"], + nodata=meta["nodata"], + ) + + for w, res, pbar in zip(windows, map(predfun, data_gen), counter): + res = np.ma.filled(res, fill_value=nodata) + dst.write(res[indexes, :, :].astype(dtype), window=w) + + output_dst = [ + RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) + ] + + for i in output_dst: + i.in_memory = True + + # create new Raster object with the result + prefix = "pred_raw_" + names = [prefix + str(i) for i in range(len(indexes))] + new_raster = self._copy(output_dst, names) + + # override close method + if tfile is not None: + for layer in new_raster.iloc: + layer._close = tfile.close + + return new_raster + + def append(self, other, in_place=False): + """Method to add new RasterLayers to a Raster object. + + Note that this modifies the Raster object in-place by default. + + Parameters + ---------- + other : Raster object, or list of Raster objects + Object to append to the Raster. + + in_place : bool (default False) + Whether to change the Raster object in-place or leave + original and return a new Raster object. 
+ + Returns + ------- + Raster + Returned only if `in_place` is False + """ + if isinstance(other, Raster): + other = [other] + + combined_names = self.names + combined_layers = list(self.loc.values()) + + for new_raster in other: + if not isinstance(new_raster, Raster): + raise AttributeError(new_raster + " is not a pyspatialml.Raster object") + + # check that other raster does not result in duplicated names + combined_names = list(combined_names) + list(new_raster.names) + combined_names = self._fix_names(combined_names) + + # update layers and names + combined_layers = combined_layers + list(new_raster.loc.values()) + + for layer, name in zip(combined_layers, combined_names): + layer.names = [name] + + if in_place is True: + self._layers = combined_layers + self.names = combined_names + else: + new_raster = self._copy(self.files, self.names) + new_raster._layers = combined_layers + return new_raster + + def drop(self, labels, in_place=False): + """Drop individual RasterLayers from a Raster object + + Note that this modifies the Raster object in-place by default. + + Parameters + --------- + labels : single label or list-like + Index (int) or layer name to drop. Can be a single integer + or label, or a list of integers or labels. + + in_place : bool (default False) + Whether to change the Raster object in-place or leave + original and return a new Raster object. 
+ + Returns + ------- + pyspatialml.Raster + Returned only if `in_place` is False + """ + # convert single label to list + if isinstance(labels, (str, int)): + labels = [labels] + + # numerical index based subsetting + if len([i for i in labels if isinstance(i, int)]) == len(labels): + subset_layers = [ + v for (i, v) in enumerate(list(self.loc.values())) if i not in labels + ] + + # str label based subsetting + elif len([i for i in labels if isinstance(i, str)]) == len(labels): + subset_layers = [ + v + for (i, v) in enumerate(list(self.loc.values())) + if list(self.names)[i] not in labels + ] + + else: + raise ValueError( + "Cannot drop layers based on mixture of indexes and labels" + ) + + if in_place is True: + self._layers = subset_layers + else: + new_raster = self._copy(self.files, self.names) + new_raster._layers = subset_layers + + return new_raster + + def rename(self, names, in_place=False): + """Rename a RasterLayer within the Raster object. + + Parameters + ---------- + names : dict + dict of old_name : new_name + + in_place : bool (default False) + Whether to change names of the Raster object in-place or + leave original and return a new Raster object. + + Returns + ------- + pyspatialml.Raster + Returned only if `in_place` is False + """ + if in_place is True: + for old_name, new_name in names.items(): + self._rename_inplace(old_name, new_name) + else: + new_raster = self._copy(src=[v for (_, v) in self.items()]) + + for old_name, new_name in names.items(): + new_raster._rename_inplace(old_name, new_name) + + return new_raster + + def mask( + self, + shapes, + invert=False, + crop=True, + pad=False, + file_path=None, + in_memory=False, + driver="GTiff", + dtype=None, + nodata=None, + **kwargs + ): + """Mask a Raster object based on the outline of shapes in a + geopandas.GeoDataFrame + + Parameters + ---------- + shapes : geopandas.GeoDataFrame + GeoDataFrame containing masking features. 
+ + invert : bool (default False) + If False then pixels outside shapes will be masked. If True + then pixels inside shape will be masked. + + crop : bool (default True) + Crop the raster to the extent of the shapes. + + pad : bool (default False) + If True, the features will be padded in each direction by + one half of a pixel prior to cropping raster. + + file_path : str (optional, default None) + File path to save to resulting Raster. If not supplied + then the resulting Raster is saved to a temporary file. + + in_memory : bool, default is False + Whether to initiate the Raster from an array and store + the data in-memory using Rasterio's in-memory files. + + driver : str (default 'GTiff') + Name of GDAL-supported driver for file export. + + dtype : str (optional, default None) + Coerce RasterLayers to the specified dtype. If not + specified then the cropped Raster is created using the + existing dtype, which uses a dtype that can accommodate the + data types of all of the individual RasterLayers. + + nodata : any number (optional, default None) + Nodata value for cropped dataset. If not specified then a + nodata value is set based on the minimum permissible value + of the Raster's data type. Note that this changes the + values of the pixels to the new nodata value, and changes + the metadata of the raster. + + kwargs : opt + Optional named arguments to pass to the format drivers. + For example can be `compress="deflate"` to add compression. + + Returns + ------- + pyspatialml.Raster + Raster with masked layers. 
+ """ + # some checks + if invert is True: + crop = False + + tfile = None + + if in_memory is False: + file_path, tfile = self._tempfile(file_path) + + meta = self.meta.copy() + dtype = self._check_supported_dtype(dtype) + + if nodata is None: + nodata = get_nodata_value(dtype) + + meta["dtype"] = dtype + + masked_ndarrays = [] + + for layer in self.loc.values(): + # set pixels outside of mask to raster band's nodata value + masked_arr, transform = rasterio.mask.mask( + dataset=layer.ds, + shapes=[shapes.geometry.unary_union], + filled=False, + invert=invert, + crop=crop, + pad=pad, + ) + + if layer.ds.count > 1: + masked_arr = masked_arr[layer.bidx - 1, :, :] + + else: + masked_arr = np.ma.squeeze(masked_arr) + + masked_ndarrays.append(masked_arr) + + # stack list of 2d arrays into 3d array + masked_ndarrays = np.ma.stack(masked_ndarrays) + + # write to file + meta["transform"] = transform + meta["driver"] = driver + meta["nodata"] = nodata + meta["height"] = masked_ndarrays.shape[1] + meta["width"] = masked_ndarrays.shape[2] + meta.update(kwargs) + masked_ndarrays = masked_ndarrays.filled(fill_value=nodata) + + if in_memory is False: + with rasterio.open(file_path, "w", **meta) as dst: + dst.write(masked_ndarrays.astype(dtype)) + output_dst = file_path + else: + with MemoryFile() as memfile: + dst = memfile.open(**meta) + dst.write(masked_ndarrays.astype(dtype)) + + output_dst = [ + RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) + ] + for i in output_dst: + i.in_memory = True + + # create new Raster object with the result + new_raster = self._copy(output_dst, self.names) + + # override close method + if tfile is not None: + for layer in new_raster.iloc: + layer._close = tfile.close + + return new_raster + + def intersect( + self, + file_path=None, + in_memory=False, + driver="GTiff", + dtype=None, + nodata=None, + **kwargs + ): + """Perform a intersect operation on the Raster object. 
+ + Computes the geometric intersection of the RasterLayers with + the Raster object. This will cause nodata values in any of + the rasters to be propagated through all of the output rasters. + + Parameters + ---------- + file_path : str (optional, default None) + File path to save to resulting Raster. If not supplied then + the resulting Raster is saved to a temporary file. + + in_memory : bool, default is False + Whether to initiate the Raster from an array and store the + data in-memory using Rasterio's in-memory files. + + driver : str (default 'GTiff') + Name of GDAL-supported driver for file export. + + dtype : str (optional, default None) + Coerce RasterLayers to the specified dtype. If not + specified then the new intersected Raster is created using + the dtype of the existing Raster dataset, which uses a + dtype that can accommodate the data types of all of the + individual RasterLayers. + + nodata : any number (optional, default None) + Nodata value for new dataset. If not specified then a + nodata value is set based on the minimum permissible value + of the Raster's data type. Note that this changes the + values of the pixels that represent nodata to the new + value. + + kwargs : opt + Optional named arguments to pass to the format drivers. + For example can be `compress="deflate"` to add compression. + + Returns + ------- + Raster + Raster with layers that are masked based on a union of all + masks in the suite of RasterLayers. 
+ """ + tfile = None + + if in_memory is False: + file_path, tfile = self._tempfile(file_path) + + meta = self.meta.copy() + dtype = self._check_supported_dtype(dtype) + + if nodata is None: + nodata = get_nodata_value(dtype) + + arr = self.read(masked=True) + mask_2d = arr.mask.any(axis=0) + + # repeat mask for n_bands + mask_3d = np.repeat(a=mask_2d[np.newaxis, :, :], repeats=self.count, axis=0) + + intersected_arr = np.ma.masked_array(arr, mask=mask_3d, fill_value=nodata) + intersected_arr = np.ma.filled(intersected_arr, fill_value=nodata) + + meta["driver"] = driver + meta["nodata"] = nodata + meta["dtype"] = dtype + meta.update(kwargs) + + if in_memory is False: + with rasterio.open(file_path, "w", **meta) as dst: + dst.write(intersected_arr.astype(dtype)) + output_dst = file_path + else: + with MemoryFile() as memfile: + dst = memfile.open(**meta) + dst.write(intersected_arr.astype(dtype)) + + output_dst = [ + RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) + ] + for i in output_dst: + i.in_memory = True + + # create new Raster object with the result + new_raster = self._copy(output_dst, self.names) + + # override close method + if tfile is not None: + for layer in new_raster.iloc: + layer._close = tfile.close + + return new_raster + + def crop( + self, + bounds, + file_path=None, + in_memory=False, + driver="GTiff", + dtype=None, + nodata=None, + **kwargs + ): + """Crops a Raster object by the supplied bounds. + + Parameters + ---------- + bounds : tuple + A tuple containing the bounding box to clip by in the form + of (xmin, ymin, xmax, ymax). + + file_path : str (optional, default None) + File path to save to cropped raster. If not supplied then + the cropped raster is saved to a temporary file. + + in_memory : bool, default is False + Whether to initiated the Raster from an array and store + the data in-memory using Rasterio's in-memory files. + + driver : str (default 'GTiff'). 
Default is 'GTiff' + Named of GDAL-supported driver for file export. + + dtype : str (optional, default None) + Coerce RasterLayers to the specified dtype. If not + specified then the new intersected Raster is created using + the dtype of theexisting Raster dataset, which uses a + dtype that can accommodate the data types of all of the + individual RasterLayers. + + nodata : any number (optional, default None) + Nodata value for new dataset. If not specified then a + nodata value is set based on the minimum permissible value + of the Raster's data type. Note that this does not change + the pixel nodata values of the raster, it only changes + the metadata of what value represents a nodata pixel. + + kwargs : opt + Optional named arguments to pass to the format drivers. + For example can be `compress="deflate"` to add compression. + + Returns + ------- + Raster + Raster cropped to new extent. + """ + tfile = None + + if in_memory is False: + file_path, tfile = self._tempfile(file_path) + + dtype = self._check_supported_dtype(dtype) + if nodata is None: + nodata = get_nodata_value(dtype) + + # get row, col positions for bounds + xmin, ymin, xmax, ymax = bounds + rows, cols = rasterio.transform.rowcol( + transform=self.transform, xs=(xmin, xmax), ys=(ymin, ymax) + ) + + # create window covering the min/max rows and cols + window = Window( + col_off=min(cols), + row_off=min(rows), + width=max(cols) - min(cols), + height=max(rows) - min(rows), + ) + cropped_arr = self.read(masked=True, window=window) + + # calculate the new transform + new_transform = rasterio.transform.from_bounds( + west=xmin, + south=ymin, + east=xmax, + north=ymax, + width=cropped_arr.shape[2], + height=cropped_arr.shape[1], + ) + + # update the destination meta + meta = self.meta.copy() + meta.update( + transform=new_transform, + width=cropped_arr.shape[2], + height=cropped_arr.shape[1], + driver=driver, + nodata=nodata, + dtype=dtype, + ) + meta.update(kwargs) + cropped_arr = 
cropped_arr.filled(fill_value=nodata) + + if in_memory is False: + with rasterio.open(file_path, "w", **meta) as dst: + dst.write(cropped_arr.astype(dtype)) + output_dst = file_path + + else: + with MemoryFile() as memfile: + dst = memfile.open(**meta) + dst.write(cropped_arr.astype(dtype)) + + output_dst = [ + RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) + ] + for i in output_dst: + i.in_memory = True + + new_raster = self._copy(output_dst, self.names) + + if tfile is not None: + for layer in new_raster.iloc: + layer._close = tfile.close + + return new_raster + + def to_crs( + self, + crs, + resampling="nearest", + file_path=None, + in_memory=False, + driver="GTiff", + nodata=None, + n_jobs=1, + warp_mem_lim=0, + progress=False, + **kwargs + ): + """Reprojects a Raster object to a different crs. + + Parameters + ---------- + crs : rasterio.transform.CRS object, or dict + Example: CRS({'init': 'EPSG:4326'}) + + resampling : str (default 'nearest') + Resampling method to use. One of the following: + nearest, + bilinear, + cubic, + cubic_spline, + lanczos, + average, + mode, + max (GDAL >= 2.2), + min (GDAL >= 2.2), + med (GDAL >= 2.2), + q1 (GDAL >= 2.2), + q3 (GDAL >= 2.2) + + file_path : str (optional, default None) + Optional path to save reprojected Raster object. If not + specified then a tempfile is used. + + in_memory : bool, default is False + Whether to initiated the Raster from an array and store the + data in-memory using Rasterio's in-memory files. + + driver : str (default 'GTiff') + Named of GDAL-supported driver for file export. + + nodata : any number (optional, default None) + Nodata value for new dataset. If not specified then the + existing nodata value of the Raster object is used, which + can accommodate the dtypes of the individual layers in the + Raster. + + n_jobs : int (default 1) + The number of warp worker threads. + + warp_mem_lim : int (default 0) + The warp operation memory limit in MB. 
Larger values allow + the warp operation to be carried out in fewer chunks. The + amount of memory required to warp a 3-band uint8 2000 row + x 2000 col raster to a destination of the same size is + approximately 56 MB. The default (0) means 64 MB with GDAL + 2.2. + + progress : bool (default False) + Optionally show progress of transform operations. + + kwargs : opt + Optional named arguments to pass to the format drivers. + For example can be `compress="deflate"` to add compression. + + Returns + ------- + Raster + Raster following reprojection. + """ + tfile = None + + if in_memory is False: + file_path, tfile = self._tempfile(file_path) + + if nodata is None: + nodata = get_nodata_value(self.meta["dtype"]) + + resampling_methods = [i.name for i in rasterio.enums.Resampling] + if resampling not in resampling_methods: + raise ValueError( + "Resampling method must be one of {}:".format(resampling_methods) + ) + + dst_transform, dst_width, dst_height = calculate_default_transform( + src_crs=self.crs, + dst_crs=crs, + width=self.width, + height=self.height, + left=self.bounds.left, + right=self.bounds.right, + bottom=self.bounds.bottom, + top=self.bounds.top, + ) + + meta = self.meta.copy() + meta["nodata"] = nodata + meta["width"] = dst_width + meta["height"] = dst_height + meta["transform"] = dst_transform + meta["crs"] = crs + meta.update(kwargs) + + if progress is True: + t = tqdm(total=self.count) + + if in_memory is False: + with rasterio.open(file_path, "w", driver=driver, **meta) as dst: + for i, layer in enumerate(self.loc.values()): + reproject( + source=rasterio.band(layer.ds, layer.bidx), + destination=rasterio.band(dst, i + 1), + resampling=rasterio.enums.Resampling[resampling], + num_threads=n_jobs, + warp_mem_lim=warp_mem_lim, + ) + + if progress is True: + t.update() + + output_dst = file_path + + else: + with MemoryFile() as memfile: + dst = memfile.open(driver=driver, **meta) + for i, layer in enumerate(self.loc.values()): + reproject( + 
source=rasterio.band(layer.ds, layer.bidx), + destination=rasterio.band(dst, i + 1), + resampling=rasterio.enums.Resampling[resampling], + num_threads=n_jobs, + warp_mem_lim=warp_mem_lim, + ) + + if progress is True: + t.update() + + output_dst = [ + RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) + ] + + for i in output_dst: + i.in_memory = True + + new_raster = self._copy(output_dst, self.names) + + if tfile is not None: + for layer in new_raster.iloc: + layer._close = tfile.close + + return new_raster + + def aggregate( + self, + out_shape, + resampling="nearest", + file_path=None, + in_memory=False, + driver="GTiff", + dtype=None, + nodata=None, + **kwargs + ): + """Aggregates a raster to (usually) a coarser grid cell size. + + Parameters + ---------- + out_shape : tuple + New shape in (rows, cols). + + resampling : str (default 'nearest') + Resampling method to use when applying decimated reads when + out_shape is specified. Supported methods are: 'average', + 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', + 'max', 'med', 'min', 'mode', 'q1', 'q3'. + + file_path : str (optional, default None) + File path to save to cropped raster. If not supplied then + the aggregated raster is saved to a temporary file. + + in_memory : bool, default is False + Whether to initiated the Raster from an array and store + the data in-memory using Rasterio's in-memory files. + + driver : str (default 'GTiff') + Named of GDAL-supported driver for file export. + + dtype : str (optional, default None) + Coerce RasterLayers to the specified dtype. If not + specified then the new intersected Raster is created using + the dtype of the existing Raster dataset, which uses a + dtype that can accommodate the data types of all of the + individual RasterLayers. + + nodata : any number (optional, default None) + Nodata value for new dataset. If not specified then a + nodata value is set based on the minimum permissible value + of the Raster's dtype. 
Note that this does not change the + pixel nodata values of the raster, it only changes the + metadata of what value represents a nodata pixel. + + kwargs : opt + Optional named arguments to pass to the format drivers. For + example can be `compress="deflate"` to add compression. + + Returns + ------- + Raster + Raster object aggregated to a new pixel size. + """ + tfile = None + + if in_memory is False: + file_path, tfile = self._tempfile(file_path) + + rows, cols = out_shape + arr = self.read(masked=True, out_shape=out_shape, resampling=resampling) + meta = self.meta.copy() + dtype = self._check_supported_dtype(dtype) + + if nodata is None: + nodata = get_nodata_value(dtype) + + arr = arr.filled(fill_value=nodata) + + meta["driver"] = driver + meta["nodata"] = nodata + meta["height"] = rows + meta["width"] = cols + meta["dtype"] = dtype + bnd = self.bounds + meta["transform"] = rasterio.transform.from_bounds( + west=bnd.left, + south=bnd.bottom, + east=bnd.right, + north=bnd.top, + width=cols, + height=rows, + ) + meta.update(kwargs) + + if in_memory is False: + with rasterio.open(file_path, "w", **meta) as dst: + dst.write(arr.astype(dtype)) + output_dst = file_path + + else: + with MemoryFile() as memfile: + dst = memfile.open(**meta) + dst.write(arr.astype(dtype)) + + output_dst = [ + RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) + ] + for i in output_dst: + i.in_memory = True + + new_raster = self._copy(output_dst, self.names) + + if tfile is not None: + for layer in new_raster.iloc: + layer._close = tfile.close + + return new_raster + + def apply( + self, + function, + file_path=None, + in_memory=False, + driver="GTiff", + dtype=None, + nodata=None, + progress=False, + function_args={}, + **kwargs + ): + """Apply user-supplied function to a Raster object. + + Parameters + ---------- + function : function + Function that takes an numpy array as a single argument. 
+ + file_path : str (optional, default None) + Optional path to save calculated Raster object. If not + specified then a tempfile is used. + + in_memory : bool, default is False + Whether to initiate the Raster from an array and store the + data in-memory using Rasterio's in-memory files. + + driver : str (default 'GTiff') + Name of GDAL-supported driver for file export. + + dtype : str (optional, default None) + Coerce RasterLayers to the specified dtype. If not + specified then the new Raster is created using the dtype of + the calculation result. + + nodata : any number (optional, default None) + Nodata value for new dataset. If not specified then a + nodata value is set based on the minimum permissible value + of the Raster's data type. Note that this changes the + values of the pixels that represent nodata pixels. + + progress : bool (default False) + Optionally show progress of transform operations. + + function_args : dict (optional) + Optionally pass arguments to the `function` as a dict or + keyword arguments. + + kwargs : opt + Optional named arguments to pass to the format drivers. + For example can be `compress="deflate"` to add compression. + + Returns + ------- + Raster + Raster containing the calculated result. 
+ """ + tfile = None + + if in_memory is False: + file_path, tfile = self._tempfile(file_path) + + function = partial(function, **function_args) + + # perform test calculation determine dimensions, dtype, nodata + window = next(self.block_shapes(*self.block_shape)) + img = self.read(masked=True, window=window) + arr = function(img, **function_args) + + if arr.ndim > 2: + indexes = np.arange(1, arr.shape[0] + 1) + count = len(indexes) + else: + indexes = 1 + count = 1 + + dtype = self._check_supported_dtype(dtype) + + if nodata is None: + nodata = get_nodata_value(dtype) + + # open output file with updated metadata + meta = self.meta.copy() + meta.update(driver=driver, count=count, dtype=dtype, nodata=nodata) + meta.update(kwargs) + + # get windows + windows = [w for w in self.block_shapes(*self.block_shape)] + data_gen = (self.read(window=w, masked=True) for w in windows) + counter = tqdm(windows, total=len(windows), disable=not progress) + + if in_memory is False: + with rasterio.open(file_path, "w", **meta) as dst: + for w, res, pbar in zip(windows, map(function, data_gen), counter): + res = np.ma.filled(res, fill_value=nodata) + dst.write(res.astype(dtype), window=w, indexes=indexes) + + output_dst = file_path + + else: + with MemoryFile() as memfile: + dst = memfile.open(**meta) + for w, res, pbar in zip(windows, map(function, data_gen), counter): + res = np.ma.filled(res, fill_value=nodata) + dst.write(res.astype(dtype), window=w, indexes=indexes) + + output_dst = [ + RasterLayer(rasterio.band(dst, i + 1)) for i in range(dst.count) + ] + + for i in output_dst: + i.in_memory = True + + # create new raster object with result + new_raster = self._copy(output_dst) + + # override close method + if tfile is not None: + for layer in new_raster.iloc: + layer._close = tfile.close + + return new_raster + + def to_pandas(self, max_pixels=None, resampling="nearest") -> pd.DataFrame: + """Raster to pandas DataFrame. 
+ + Parameters + ---------- + max_pixels: int (default None) + Maximum number of pixels to sample. By default all pixels + are used. + + resampling : str (default 'nearest') + Resampling method to use when applying decimated reads when + out_shape is specified. Supported methods are: 'average', + 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', + 'max', 'med', 'min', 'mode', 'q1', 'q3'. + + Returns + ------- + pandas.DataFrame + DataFrame containing values of names of RasterLayers in + the Raster as columns, and pixel values as rows. + """ + + # read dataset using decimated reads + if max_pixels is not None: + rel_width = self.shape[1] / max_pixels + + if rel_width > 1: + col_scaling = round(max_pixels / rel_width) + row_scaling = max_pixels - col_scaling + else: + col_scaling = round(max_pixels * rel_width) + row_scaling = max_pixels - col_scaling + else: + row_scaling, col_scaling = self.shape[0], self.shape[1] + + out_shape = (row_scaling, col_scaling) + arr = self.read(masked=True, out_shape=out_shape, resampling=resampling) + bands, rows, cols = arr.shape + nodatavals = self.nodatavals + + # x and y grid coordinate arrays + x_range = np.linspace(start=self.bounds.left, stop=self.bounds.right, num=cols) + y_range = np.linspace(start=self.bounds.top, stop=self.bounds.bottom, num=rows) + xs, ys = np.meshgrid(x_range, y_range) + + arr = arr.reshape((bands, rows * cols)) + arr = arr.transpose() + df = pd.DataFrame( + data=np.column_stack((xs.flatten(), ys.flatten(), arr)), + columns=["x", "y"] + list(self.names), + ) + + # set nodata values to nan + for i, col_name in enumerate(self.names): + df.loc[df[col_name] == nodatavals[i], col_name] = np.nan + + return df + + def sample(self, size, strata=None, return_array=False, random_state=None): + """Generates a random sample of according to size, and samples + the pixel values. + + Parameters + ---------- + size : int + Number of random samples or number of samples per strata if + a `strata` object is supplied. 
+ + strata : pyspatialml Raster object (opt) + Whether to use stratified instead of random sampling. Strata + can be supplied using another pyspatialml.Raster object. + + return_array : bool (opt), default=False + Optionally return extracted data as separate X and xy + masked numpy arrays. + + random_state : int (opt) + integer to use within random.seed. + + Returns + ------- + pandas.DataFrame + DataFrame containing values of names of RasterLayers in the Raster + if `return_array` is False. + + tuple + A tuple containing two elements if `return_array` is True: + + - numpy.ndarray + Numpy array of extracted raster values, typically 2d. + - numpy.ndarray + 2D numpy array of xy coordinates of extracted values. + """ + # set the seed + np.random.seed(seed=random_state) + + if not strata: + # create np array to store randomly sampled data + valid_samples = np.zeros((0, self.count)) + valid_coordinates = np.zeros((0, 2)) + + # loop until target number of samples is satisfied + satisfied = False + + n = size + while satisfied is False: + + # generate random row and column indices + Xsample = np.random.choice(range(0, self.width), n) + Ysample = np.random.choice(range(0, self.height), n) + + # create 2d numpy array with sample locations set to 1 + sample_raster = np.empty((self.height, self.width)) + sample_raster[:] = np.nan + sample_raster[Ysample, Xsample] = 1 + + # get indices of sample locations + rows, cols = np.nonzero(np.isnan(sample_raster) == False) + + # convert row, col indices to coordinates + xy = np.transpose(rasterio.transform.xy(self.transform, rows, cols)) + + # sample at random point locations + samples = self.extract_xy_chunked(xs=xy[:, 0], ys=xy[:, 1]) + + # append only non-masked data to each row of X_random + samples = samples.astype("float32").filled(np.nan) + invalid_ind = np.isnan(samples).any(axis=1) + samples = samples[~invalid_ind, :] + valid_samples = np.append(valid_samples, samples, axis=0) + + xy = xy[~invalid_ind, :] + valid_coordinates = 
np.append(valid_coordinates, xy, axis=0) + + # check to see if target_nsamples has been reached + if len(valid_samples) >= size: + satisfied = True + else: + n = size - len(valid_samples) + + else: + if strata.count != 1: + raise AttributeError( + "Strata must be a Raster object with a single band." + ) + + # get number of unique categories + strata_arr = strata.iloc[0].read(masked=True) + categories = np.unique(strata_arr.flatten()) + categories = categories[~categories.mask] + + # store selected coordinates + selected = np.zeros((0, 2)) + + for cat in categories: + + # get row,col positions for cat strata + ind = np.transpose(np.nonzero(strata_arr == cat)) + + if size > ind.shape[0]: + msg = ( + "Sample size is greater than number of pixels in " "strata {}" + ).format(str(ind)) + + msg = os.linesep.join([msg, "Sampling using replacement"]) + Warning(msg) + + # random sample + sample = np.random.uniform(0, ind.shape[0], size).astype("int") + xy = ind[sample, :] + + selected = np.append(selected, xy, axis=0) + + # convert row, col indices to coordinates + x, y = rasterio.transform.xy( + transform=self.transform, rows=selected[:, 0], cols=selected[:, 1] + ) + valid_coordinates = np.column_stack((x, y)) + + # extract data + valid_samples = self.extract_xy_chunked( + xs=valid_coordinates[:, 0], ys=valid_coordinates[:, 1] + ) + + # return as geopandas array as default (or numpy arrays) + if return_array is False: + gdf = pd.DataFrame(valid_samples, columns=self.names) + gdf["geometry"] = list( + zip(valid_coordinates[:, 0], valid_coordinates[:, 1]) + ) + gdf["geometry"] = gdf["geometry"].apply(Point) + gdf = gpd.GeoDataFrame(gdf, geometry="geometry", crs=self.crs) + return gdf + else: + return valid_samples, valid_coordinates + + def extract_xy(self, xys, return_array=False, progress=False): + """Samples pixel values using an array of xy locations. 
+ + Parameters + ---------- + xys : 2d array-like + x and y coordinates from which to sample the raster + (n_samples, xys). + + return_array : bool (opt), default=False + By default the extracted pixel values are returned as a + geopandas.GeoDataFrame. If `return_array=True` then the + extracted pixel values are returned as a tuple of + numpy.ndarrays. + + progress : bool (opt), default=False + Show a progress bar for extraction. + + Returns + ------- + geopandas.GeoDataframe + Containing extracted data as point geometries if + `return_array=False`. + + numpy.ndarray + 2d masked array containing sampled raster values (sample, + bands) at the x,y locations. + """ + # extract pixel values + dtype = np.find_common_type([np.float32], self.dtypes) + X = np.ma.zeros((xys.shape[0], self.count), dtype=dtype) + t = tqdm(self.loc.values(), total=self.count, disable=not progress) + + for i, (layer, pbar) in enumerate(zip(self.loc.values(), t)): + sampler = sample_gen( + dataset=layer.ds, xy=xys, indexes=layer.bidx, masked=True + ) + v = np.ma.asarray([i for i in sampler]) + X[:, i] = v.flatten() + + # return as geopandas array as default (or numpy arrays) + if return_array is False: + gdf = pd.DataFrame(X, columns=self.names) + gdf["geometry"] = list(zip(xys[:, 0], xys[:, 1])) + gdf["geometry"] = gdf["geometry"].apply(Point) + gdf = gpd.GeoDataFrame(gdf, geometry="geometry", crs=self.crs) + return gdf + + return X + + def extract_xy_chunked(self, xs, ys, progress=False): + rows, cols = rowcol(self.transform, xs, ys) + rowcol_idx = np.column_stack((rows, cols)) + pixel_index = np.arange(rowcol_idx.shape[0]) + + # get row, col positions that are outside of the raster + negative_idx = (rowcol_idx < 0).any(axis=1) + outside_idx = (rowcol_idx[:, 0] >= self.shape[0]) | ( + rowcol_idx[:, 1] >= self.shape[1] + ) + + outsiders = np.logical_or(negative_idx, outside_idx) + valid = np.nonzero(outsiders == False)[0] + invalid = np.nonzero(outsiders == True)[0] + + # remove row, col > 
shape + rowcol_idx = rowcol_idx[~outsiders, :] + pixel_index = pixel_index[~outsiders] + + # lookup pixel values at row, col positons by chunk + windows = [w for w in self.block_shapes(*self.block_shape)] + data_gen = (self.read(window=w, masked=True) for w in windows) + t = tqdm(windows, total=len(windows), disable=not progress) + + dtype = np.result_type(np.float32, *self.dtypes) + X = np.ma.zeros((self.count, 0), dtype=dtype) + pixel_indices = np.zeros(0, dtype="int") + + for w, data, pbar in zip(windows, data_gen, t): + res, chunk_pixels = self.extract_by_chunk(data, w, rowcol_idx, pixel_index) + X = np.ma.concatenate((X, res), axis=1) + pixel_indices = np.concatenate((pixel_indices, chunk_pixels)) + + X = X.transpose((1, 0)) + + # insert empty rows to make input dimensions match output + output_arr = np.ma.zeros((len(rows), self.count)) + output_arr[pixel_indices, :] = X + output_arr[invalid, :].mask = True + output_arr[invalid, :] = None + + return output_arr + + def extract_vector(self, gdf, progress=False): + """Sample a Raster/RasterLayer using a geopandas GeoDataframe + containing points, lines or polygon features. + + Parameters + ---------- + gdf: geopandas.GeoDataFrame + Containing either point, line or polygon geometries. + Overlapping geometries will cause the same pixels to be + sampled. + + progress : bool (opt), default=False + Show a progress bar for extraction. + + Returns + ------- + geopandas.GeoDataframe + Containing extracted data as point geometries (one point + per pixel). The resulting GeoDataFrame is indexed using + a named pandas.MultiIndex, with `pixel_idx` index + referring to the index of each pixel that was sampled, and + the `geometry_idx` index referring to the index of the each + geometry in the supplied `gdf`. This makes it possible to + keep track of how sampled pixel relates to the original + geometries, i.e. 
multiple pixels being extracted within + the area of a single polygon that can be referred to using + the `geometry_idx`. + + The extracted data can subsequently be joined with the + attribute table of the supplied `gdf` using: + + training_py = geopandas.read_file(nc.polygons) + df = self.stack.extract_vector(gdf=training_py) + df = df.dropna() + + df = df.merge( + right=training_py.loc[:, ("id", "label")], + left_on="polygon_idx", + right_on="id", + right_index=True + ) + """ + # rasterize polygon and line geometries + if all(gdf.geom_type == "Polygon") or all(gdf.geom_type == "LineString"): + + shapes = [(geom, val) for geom, val in zip(gdf.geometry, gdf.index)] + arr = np.ma.zeros((self.height, self.width)) + arr[:] = -99999 + + arr = features.rasterize( + shapes=shapes, + fill=-99999, + out=arr, + transform=self.transform, + all_touched=True, + ) + + ids = arr[np.nonzero(arr != -99999)] + ids = ids.astype("int") + rows, cols = np.nonzero(arr != -99999) + xys = rasterio.transform.xy(transform=self.transform, rows=rows, cols=cols) + xys = np.transpose(xys) + + elif all(gdf.geom_type == "Point"): + ids = gdf.index.values + xys = gdf.bounds.iloc[:, 2:].values + + # extract raster pixels + X = self.extract_xy_chunked(xs=xys[:, 0], ys=xys[:, 1], progress=progress) + + # return as geopandas array as default (or numpy arrays) + X = pd.DataFrame( + data=X, columns=list(self.names), index=[pd.RangeIndex(0, X.shape[0]), ids] + ) + X.index.set_names(["pixel_idx", "geometry_idx"], inplace=True) + X["geometry"] = list(zip(xys[:, 0], xys[:, 1])) + X["geometry"] = X["geometry"].apply(Point) + X = gpd.GeoDataFrame(X, geometry="geometry", crs=self.crs) + + return X + + def extract_raster(self, src, progress=False): + """Sample a Raster object by an aligned raster of labelled pixels. + + Parameters + ---------- + src: rasterio DatasetReader + Single band raster containing labelled pixels as an open + rasterio DatasetReader object. 
+ + progress : bool (opt), default=False + Show a progress bar for extraction. + + Returns + ------- + geopandas.GeoDataFrame + Geodataframe containing extracted data as point features if + `return_array=False` + """ + # open response raster and get labelled pixel indices and values + arr = src.read(1, masked=True) + rows, cols = np.nonzero(~arr.mask) + xys = np.transpose(rasterio.transform.xy(src.transform, rows, cols)) + ys = arr.data[rows, cols] + + # extract Raster object values at row, col indices + X = self.extract_xy_chunked(xs=xys[:, 0], ys=xys[:, 1], progress=progress) + + # summarize data + column_names = ["value"] + list(self.names) + gdf = pd.DataFrame(data=np.ma.column_stack((ys, X)), columns=column_names) + gdf["geometry"] = list(zip(xys[:, 0], xys[:, 1])) + gdf["geometry"] = gdf["geometry"].apply(Point) + gdf = gpd.GeoDataFrame(gdf, geometry="geometry", crs=self.crs) + + return gdf + + @staticmethod + def extract_by_chunk(arr, w, idx, pixel_idx): + d = idx.copy() + pixel_idx = pixel_idx.copy() + + # subtract chunk offset from row, col positions + d[:, 0] = d[:, 0] - w.row_off + d[:, 1] = d[:, 1] - w.col_off + + # remove negative row, col positions + pos = (d >= 0).all(axis=1) + d = d[pos, :] + pixel_idx = pixel_idx[pos] + + # remove row, col > shape + within_range = (d[:, 0] < arr.shape[1]) & (d[:, 1] < arr.shape[2]) + d = d[within_range, :] + pixel_idx = pixel_idx[within_range] + + extracted_data = arr[:, d[:, 0], d[:, 1]] + return (extracted_data, pixel_idx) + + def scale( + self, + centre=True, + scale=True, + file_path=None, + in_memory=False, + driver="GTiff", + dtype=None, + nodata=None, + progress=False, + ): + """Standardize (centre and scale) a Raster object by + subtracting the mean and dividing by the standard deviation for + each layer in the object. + + The mean and standard deviation statistics are calculated + for each layer separately. 
+ + Parameters + ---------- + centre : bool, default is True + Whether to subtract the mean from each layer. + + scale : bool, default is True + Whether to divide each layer by the standard deviation of + the layer. + + file_path : str (optional, default None) + Path to a GeoTiff raster for the prediction results. If + not specified then the output is written to a temporary + file. + + in_memory : bool, default is False + Whether to initiated the Raster from an array and store the + data in-memory using Rasterio's in-memory files. + + driver : str (default 'GTiff') + Named of GDAL-supported driver for file export. + + dtype : str (optional, default None) + Optionally specify a GDAL compatible data type when saving + to file. If not specified, a data type is set based on the + data type of the prediction. + + nodata : any number (optional, default None) + Nodata value for file export. If not specified then the + nodata value is derived from the minimum permissible value + for the given data type. + + progress : bool (default False) + Show progress bar for operation. + + Returns + ------- + Pyspatialml.Raster object with rescaled data. + """ + + def scaler(x, means, sds): + for i, m, z in zip(range(x.shape[0]), means, sds): + x[i, :, :] = (x[i, :, :] - m) / z + return x + + if centre is True: + means = self.mean() + else: + means = np.repeat(0, self.count) + + if scale is True: + sds = self.stddev() + else: + sds = np.repeat(1, self.count) + + res = self.apply( + scaler, + file_path=file_path, + in_memory=in_memory, + driver=driver, + dtype=dtype, + nodata=nodata, + progress=progress, + function_args=dict(means=means, sds=sds), + ) + + return res + + def alter( + self, + transformer, + file_path=None, + in_memory=False, + driver="GTiff", + dtype=None, + nodata=None, + progress=False, + ): + """Apply a fitted scikit-learn transformer to a Raster object. + + Can be used to transform a raster using methods such as StandardScaler, + RobustScaler etc. 
+ + Parameters + ---------- + transformer : a sklearn.preprocessing.Transformer object + + file_path : str (optional, default None) + Path to a GeoTiff raster for the prediction results. If + not specified then the output is written to a temporary + file. + + in_memory : bool, default is False + Whether to initiated the Raster from an array and store the + data in-memory using Rasterio's in-memory files. + + driver : str (default 'GTiff') + Named of GDAL-supported driver for file export. + + dtype : str (optional, default None) + Optionally specify a GDAL compatible data type when saving + to file. If not specified, a data type is set based on the + data type of the prediction. + + nodata : any number (optional, default None) + Nodata value for file export. If not specified then the + nodata value is derived from the minimum permissible value + for the given data type. + + progress : bool (default False) + Show progress bar for operation. + + Returns + ------- + Pyspatialml.Raster object with transformed data. 
+ """ + res = self.apply( + self._apply_transformer, + file_path=file_path, + in_memory=in_memory, + driver=driver, + dtype=dtype, + nodata=nodata, + progress=progress, + function_args={"transformer": transformer}, + ) + + return res + + +class TempRasterLayer: + """Create a NamedTemporaryFile like object on Windows that has a + close method + + Workaround used on Windows which cannot open the file a second time + """ + + def __init__(self, tempdir=tempfile.tempdir): + self.tfile = tempfile.NamedTemporaryFile(dir=tempdir, suffix=".tif").name + self.name = self.tfile + + def close(self): + os.unlink(self.tfile) diff --git a/pyspatialml/rasterlayer.py b/pyspatialml/rasterlayer.py index 1823b87..1c9920c 100644 --- a/pyspatialml/rasterlayer.py +++ b/pyspatialml/rasterlayer.py @@ -1,440 +1,440 @@ -from functools import partial - -import os -import re -import numpy as np -import rasterio -from rasterio.io import MemoryFile - -from ._plotting import RasterLayerPlotMixin -from ._rasterstats import RasterLayerStatsMixin -from ._utils import get_nodata_value - - -class RasterLayer(RasterLayerStatsMixin, RasterLayerPlotMixin): - """Represents a single raster band derived from a single or - multi-band raster dataset - - Simple wrapper around a rasterio.Band object with additional - methods. Used because the Rasterio.Band.ds.read method reads - all bands from a multi-band dataset, whereas the RasterLayer read - method only reads a single band. - - Methods encapsulated in RasterLayer objects represent those that - typically would only be applied to a single-band of a raster, i.e. - sieve-clump, distance to non-NaN pixels, or arithmetic operations - on individual layers. - - Attributes - ---------- - bidx : int - The band index of the RasterLayer within the file dataset. - - dtype : str - The data type of the RasterLayer. - - ds : rasterio.band - The underlying rasterio.band object. - - name : str - A syntactically valid name for the RasterLayer. 
- - file : str - The file path to the dataset. - - nodata : any number - The number that is used to represent nodata pixels in the - RasterLayer. - - driver : str - The name of the GDAL format driver. - - meta : dict - A python dict storing the RasterLayer metadata. - - transform : affine.Affine object - The affine transform parameters. - - count : int - Number of layers; always equal to 1. - - shape: tuple - Shape of RasterLayer in (rows, columns) - - width, height: int - The width (cols) and height (rows) of the dataset. - - bounds : BoundingBox named tuple - A named tuple with left, bottom, right and top coordinates of - the dataset. - - cmap : str - The name of matplotlib map, or a custom - matplotlib.cm.LinearSegmentedColormap or ListedColormap object. - - norm : matplotlib.colors.Normalize (opt) - A matplotlib.colors.Normalize to apply to the RasterLayer. - This overides the norm attribute of the RasterLayer. - """ - - def __init__(self, band): - """Initiate a RasterLayer object - - Parameters - ---------- - band : a rasterio.Band object - """ - self.bidx = band.bidx - self.dtype = band.dtype - self.ds = band.ds - - if len(band.ds.files) > 0: - description = band.ds.descriptions[band.bidx-1] - if description is not None: - layer_name = self._make_name(band.ds.descriptions[band.bidx-1]) - else: - layer_name = self._make_name(band.ds.files[0]) - - self.name = layer_name - self.file = band.ds.files[0] - - else: - self.name = "in_memory" - self.file = None - - self.nodata = band.ds.nodata - self.driver = band.ds.meta["driver"] - self.meta = band.ds.meta - self.transform = band.ds.transform - self.crs = band.ds.crs - self.count = 1 - self.shape = band.shape - self.width = band.ds.width - self.height = band.ds.height - self.bounds = band.ds.bounds - self.in_memory = False - - self.cmap = "viridis" - self.norm = None - self.categorical = False - - @staticmethod - def _make_name(name): - """Converts a file basename to a valid class attribute name. 
- - Parameters - ---------- - name : str - File basename for converting to a valid class attribute name. - - Returns - ------- - valid_name : str - Syntactically correct name of layer so that it can form a class - instance attribute. - """ - basename = os.path.basename(name) - sans_ext = os.path.splitext(basename)[0] - - valid_name = sans_ext.replace(" ", "_").replace("-", "_").replace(".", "_") - - if valid_name[0].isdigit(): - valid_name = "x" + valid_name - - valid_name = re.sub(r"[\[\]\(\)\{\}\;]", "", valid_name) - valid_name = re.sub(r"_+", "_", valid_name) - - return valid_name - - def close(self): - self.ds.close() - - def _arith(self, function, other=None): - """General method for performing arithmetic operations on - RasterLayer objects - - Parameters - ---------- - function : function - Custom function that takes either one or two arrays, and - returns a single array following a pre-defined calculation. - - other : pyspatialml.RasterLayer (optional, default None) - If not specified, then a `function` should be provided that - performs a calculation using only the selected RasterLayer. - If `other` is specified, then a `function` should be - supplied that takes two ndarrays as arguments and performs a - calculation using both layers, i.e. layer1 - layer2. - - Returns - ------- - pyspatialml.RasterLayer - Returns a single RasterLayer containing the calculated - result. 
- """ - - driver = self.driver - - # if other is a RasterLayer then use the read method to get the - # array, otherwise assume other is a scalar or array - if isinstance(other, RasterLayer): - result = function(self.read(masked=True), other.read(masked=True)) - else: - result = function(self.read(masked=True), other) - - nodata = get_nodata_value(result.dtype) - - # open output file with updated metadata - meta = self.meta.copy() - meta.update(driver=driver, count=1, dtype=result.dtype, nodata=nodata) - - with MemoryFile() as memfile: - dst = memfile.open(**meta) - result = np.ma.filled(result, fill_value=nodata) - dst.write(result, indexes=1) - - # create RasterLayer from result - layer = RasterLayer(rasterio.band(dst, 1)) - - return layer - - def __add__(self, other): - """Implements behaviour for addition of two RasterLayers, - i.e. added_layer = layer1 + layer2 - """ - - def func(arr1, arr2): - return arr1 + arr2 - - return self._arith(func, other) - - def __sub__(self, other): - """Implements behaviour for subtraction of two RasterLayers, i.e. - subtracted_layer = layer1 - layer2 - """ - - def func(arr1, arr2): - return arr1 - arr2 - - return self._arith(func, other) - - def __mul__(self, other): - """Implements behaviour for multiplication of two RasterLayers, i.e. - product = layer1 * layer2 - """ - - def func(arr1, arr2): - return arr1 * arr2 - - return self._arith(func, other) - - def __truediv__(self, other): - """Implements behaviour for division using `/` of two RasterLayers, - i.e. div = layer1 / layer2 - """ - - def func(arr1, arr2): - return arr1 / arr2 - - return self._arith(func, other) - - def __and__(self, other): - """Implements & operator - - Equivalent to a intersection operation of self - with other, i.e. intersected = layer1 & layer2. 
- """ - - def func(arr1, arr2): - mask = np.logical_and(arr1, arr2).mask - arr1.mask[mask] = True - return arr1 - - return self._arith(func, other) - - def __or__(self, other): - """Implements | operator - - Fills gaps in self with pixels from other. Equivalent to a union - operation, i.e. union = layer1 | layer2. - """ - - def func(arr1, arr2): - idx = np.logical_or(arr1, arr2.mask).mask - arr1[idx] = arr2[idx] - return arr1 - - return self._arith(func, other) - - def __xor__(self, other): - """Exclusive OR using ^ - - Equivalent to a symmetrical difference where the result comprises - pixels that occur in self or other, but not both, i.e. - xor = layer1 ^ layer2. - """ - - def func(arr1, arr2): - mask = ~np.logical_xor(arr1, arr2) - idx = np.logical_or(arr1, arr2.mask).mask - arr1[idx] = arr2[idx] - arr1.mask[np.nonzero(mask)] = True - return arr1 - - return self._arith(func, other) - - def __round__(self, ndigits): - """Behaviour for round() function, i.e. round(layer)""" - - def func(arr, ndigits): - return np.round(arr, ndigits) - - func = partial(func, ndigits=ndigits) - - return self._arith(func) - - def __floor__(self): - """Rounding down to the nearest integer using math.floor(), - i.e. math.floor(layer)""" - - def func(arr): - return np.floor(arr) - - return self._arith(func) - - def __ceil__(self): - """Rounding up to the nearest integer using math.ceil(), i.e. - math.ceil(layer)""" - - def func(arr): - return np.ceil(arr) - - return self._arith(func) - - def __trunc__(self): - """Truncating to an integral using math.trunc(), i.e. - math.trunc(layer)""" - - def func(arr): - return np.trunc(arr) - - return self._arith(func) - - def __abs__(self): - """abs() function as applied to a RasterLayer, i.e. abs(layer)""" - - def func(arr): - return np.abs(arr) - - return self._arith(func) - - def __pos__(self): - """Unary positive, i.e. 
+layer1""" - - def func(arr): - return np.positive(arr) - - return self._arith(func) - - def __neg__(self): - """ - Unary negative, i.e. -layer1 - """ - - def func(arr): - return np.negative(arr) - - return self._arith(func) - - def read(self, **kwargs): - """Read method for a single RasterLayer. - - Reads the pixel values from a RasterLayer into a ndarray that - always will have two dimensions in the order of (rows, columns). - - Parameters - ---------- - **kwargs : named arguments that can be passed to the the - rasterio.DatasetReader.read method. - """ - if "resampling" in kwargs.keys(): - resampling_methods = [i.name for i in rasterio.enums.Resampling] - - if kwargs["resampling"] not in resampling_methods: - raise ValueError( - "Invalid resampling method. Resampling " - "method must be one of {0}:".format(resampling_methods) - ) - - kwargs["resampling"] = rasterio.enums.Resampling[kwargs["resampling"]] - - return self.ds.read(indexes=self.bidx, **kwargs) - - def seek(self, offset, whence=None): - return self - - def tell(self): - return self - - def write(self, file_path, driver="GTiff", dtype=None, nodata=None, **kwargs): - """Write method for a single RasterLayer. - - Parameters - ---------- - file_path : str (opt) - File path to save the dataset. - - driver : str - GDAL-compatible driver used for the file format. - - dtype : str (opt) - Numpy dtype used for the file. If omitted then the - RasterLayer's dtype is used. - - nodata : any number (opt) - A value used to represent the nodata pixels. If omitted - then the RasterLayer's nodata value is used (if assigned - already). - - kwargs : opt - Optional named arguments to pass to the format drivers. - For example can be `compress="deflate"` to add compression. 
- - Returns - ------- - pyspatialml.RasterLayer - """ - if dtype is None: - dtype = self.dtype - - if nodata is None: - nodata = get_nodata_value(dtype) - - meta = self.ds.meta - meta["driver"] = driver - meta["nodata"] = nodata - meta["dtype"] = dtype - meta.update(kwargs) - - # mask any nodata values - arr = np.ma.masked_equal(self.read(), self.nodata) - arr = arr.filled(fill_value=nodata) - - # write to file - with rasterio.open(file_path, mode="w", **meta) as dst: - dst.write(arr.astype(dtype), 1) - - src = rasterio.open(file_path) - band = rasterio.band(src, 1) - layer = RasterLayer(band) - - return layer - - def _extract_by_indices(self, rows, cols): - """Spatial query of Raster object (by-band)""" - - X = np.ma.zeros((len(rows), self.count), dtype="float32") - arr = self.read(masked=True) - X[:, 0] = arr[rows, cols] - - return X +from functools import partial + +import os +import re +import numpy as np +import rasterio +from rasterio.io import MemoryFile + +from ._plotting import RasterLayerPlotMixin +from ._rasterstats import RasterLayerStatsMixin +from ._utils import get_nodata_value + + +class RasterLayer(RasterLayerStatsMixin, RasterLayerPlotMixin): + """Represents a single raster band derived from a single or + multi-band raster dataset + + Simple wrapper around a rasterio.Band object with additional + methods. Used because the Rasterio.Band.ds.read method reads + all bands from a multi-band dataset, whereas the RasterLayer read + method only reads a single band. + + Methods encapsulated in RasterLayer objects represent those that + typically would only be applied to a single-band of a raster, i.e. + sieve-clump, distance to non-NaN pixels, or arithmetic operations + on individual layers. + + Attributes + ---------- + bidx : int + The band index of the RasterLayer within the file dataset. + + dtype : str + The data type of the RasterLayer. + + ds : rasterio.band + The underlying rasterio.band object. 
+ + name : str + A syntactically valid name for the RasterLayer. + + file : str + The file path to the dataset. + + nodata : any number + The number that is used to represent nodata pixels in the + RasterLayer. + + driver : str + The name of the GDAL format driver. + + meta : dict + A python dict storing the RasterLayer metadata. + + transform : affine.Affine object + The affine transform parameters. + + count : int + Number of layers; always equal to 1. + + shape: tuple + Shape of RasterLayer in (rows, columns) + + width, height: int + The width (cols) and height (rows) of the dataset. + + bounds : BoundingBox named tuple + A named tuple with left, bottom, right and top coordinates of + the dataset. + + cmap : str + The name of matplotlib map, or a custom + matplotlib.cm.LinearSegmentedColormap or ListedColormap object. + + norm : matplotlib.colors.Normalize (opt) + A matplotlib.colors.Normalize to apply to the RasterLayer. + This overides the norm attribute of the RasterLayer. + """ + + def __init__(self, band): + """Initiate a RasterLayer object + + Parameters + ---------- + band : a rasterio.Band object + """ + self.bidx = band.bidx + self.dtype = band.dtype + self.ds = band.ds + + if len(band.ds.files) > 0: + description = band.ds.descriptions[band.bidx-1] + if description is not None: + layer_name = self._make_name(band.ds.descriptions[band.bidx-1]) + else: + layer_name = self._make_name(band.ds.files[0]) + + self.name = layer_name + self.file = band.ds.files[0] + + else: + self.name = "in_memory" + self.file = None + + self.nodata = band.ds.nodata + self.driver = band.ds.meta["driver"] + self.meta = band.ds.meta + self.transform = band.ds.transform + self.crs = band.ds.crs + self.count = 1 + self.shape = band.shape + self.width = band.ds.width + self.height = band.ds.height + self.bounds = band.ds.bounds + self.in_memory = False + + self.cmap = "viridis" + self.norm = None + self.categorical = False + + @staticmethod + def _make_name(name): + """Converts a 
file basename to a valid class attribute name. + + Parameters + ---------- + name : str + File basename for converting to a valid class attribute name. + + Returns + ------- + valid_name : str + Syntactically correct name of layer so that it can form a class + instance attribute. + """ + basename = os.path.basename(name) + sans_ext = os.path.splitext(basename)[0] + + valid_name = sans_ext.replace(" ", "_").replace("-", "_").replace(".", "_") + + if valid_name[0].isdigit(): + valid_name = "x" + valid_name + + valid_name = re.sub(r"[\[\]\(\)\{\}\;]", "", valid_name) + valid_name = re.sub(r"_+", "_", valid_name) + + return valid_name + + def close(self): + self.ds.close() + + def _arith(self, function, other=None): + """General method for performing arithmetic operations on + RasterLayer objects + + Parameters + ---------- + function : function + Custom function that takes either one or two arrays, and + returns a single array following a pre-defined calculation. + + other : pyspatialml.RasterLayer (optional, default None) + If not specified, then a `function` should be provided that + performs a calculation using only the selected RasterLayer. + If `other` is specified, then a `function` should be + supplied that takes two ndarrays as arguments and performs a + calculation using both layers, i.e. layer1 - layer2. + + Returns + ------- + pyspatialml.RasterLayer + Returns a single RasterLayer containing the calculated + result. 
+ """ + + driver = self.driver + + # if other is a RasterLayer then use the read method to get the + # array, otherwise assume other is a scalar or array + if isinstance(other, RasterLayer): + result = function(self.read(masked=True), other.read(masked=True)) + else: + result = function(self.read(masked=True), other) + + nodata = get_nodata_value(result.dtype) + + # open output file with updated metadata + meta = self.meta.copy() + meta.update(driver=driver, count=1, dtype=result.dtype, nodata=nodata) + + with MemoryFile() as memfile: + dst = memfile.open(**meta) + result = np.ma.filled(result, fill_value=nodata) + dst.write(result, indexes=1) + + # create RasterLayer from result + layer = RasterLayer(rasterio.band(dst, 1)) + + return layer + + def __add__(self, other): + """Implements behaviour for addition of two RasterLayers, + i.e. added_layer = layer1 + layer2 + """ + + def func(arr1, arr2): + return arr1 + arr2 + + return self._arith(func, other) + + def __sub__(self, other): + """Implements behaviour for subtraction of two RasterLayers, i.e. + subtracted_layer = layer1 - layer2 + """ + + def func(arr1, arr2): + return arr1 - arr2 + + return self._arith(func, other) + + def __mul__(self, other): + """Implements behaviour for multiplication of two RasterLayers, i.e. + product = layer1 * layer2 + """ + + def func(arr1, arr2): + return arr1 * arr2 + + return self._arith(func, other) + + def __truediv__(self, other): + """Implements behaviour for division using `/` of two RasterLayers, + i.e. div = layer1 / layer2 + """ + + def func(arr1, arr2): + return arr1 / arr2 + + return self._arith(func, other) + + def __and__(self, other): + """Implements & operator + + Equivalent to a intersection operation of self + with other, i.e. intersected = layer1 & layer2. 
+ """ + + def func(arr1, arr2): + mask = np.logical_and(arr1, arr2).mask + arr1.mask[mask] = True + return arr1 + + return self._arith(func, other) + + def __or__(self, other): + """Implements | operator + + Fills gaps in self with pixels from other. Equivalent to a union + operation, i.e. union = layer1 | layer2. + """ + + def func(arr1, arr2): + idx = np.logical_or(arr1, arr2.mask).mask + arr1[idx] = arr2[idx] + return arr1 + + return self._arith(func, other) + + def __xor__(self, other): + """Exclusive OR using ^ + + Equivalent to a symmetrical difference where the result comprises + pixels that occur in self or other, but not both, i.e. + xor = layer1 ^ layer2. + """ + + def func(arr1, arr2): + mask = ~np.logical_xor(arr1, arr2) + idx = np.logical_or(arr1, arr2.mask).mask + arr1[idx] = arr2[idx] + arr1.mask[np.nonzero(mask)] = True + return arr1 + + return self._arith(func, other) + + def __round__(self, ndigits): + """Behaviour for round() function, i.e. round(layer)""" + + def func(arr, ndigits): + return np.round(arr, ndigits) + + func = partial(func, ndigits=ndigits) + + return self._arith(func) + + def __floor__(self): + """Rounding down to the nearest integer using math.floor(), + i.e. math.floor(layer)""" + + def func(arr): + return np.floor(arr) + + return self._arith(func) + + def __ceil__(self): + """Rounding up to the nearest integer using math.ceil(), i.e. + math.ceil(layer)""" + + def func(arr): + return np.ceil(arr) + + return self._arith(func) + + def __trunc__(self): + """Truncating to an integral using math.trunc(), i.e. + math.trunc(layer)""" + + def func(arr): + return np.trunc(arr) + + return self._arith(func) + + def __abs__(self): + """abs() function as applied to a RasterLayer, i.e. abs(layer)""" + + def func(arr): + return np.abs(arr) + + return self._arith(func) + + def __pos__(self): + """Unary positive, i.e. 
+layer1""" + + def func(arr): + return np.positive(arr) + + return self._arith(func) + + def __neg__(self): + """ + Unary negative, i.e. -layer1 + """ + + def func(arr): + return np.negative(arr) + + return self._arith(func) + + def read(self, **kwargs): + """Read method for a single RasterLayer. + + Reads the pixel values from a RasterLayer into a ndarray that + always will have two dimensions in the order of (rows, columns). + + Parameters + ---------- + **kwargs : named arguments that can be passed to the the + rasterio.DatasetReader.read method. + """ + if "resampling" in kwargs.keys(): + resampling_methods = [i.name for i in rasterio.enums.Resampling] + + if kwargs["resampling"] not in resampling_methods: + raise ValueError( + "Invalid resampling method. Resampling " + "method must be one of {0}:".format(resampling_methods) + ) + + kwargs["resampling"] = rasterio.enums.Resampling[kwargs["resampling"]] + + return self.ds.read(indexes=self.bidx, **kwargs) + + def seek(self, offset, whence=None): + return self + + def tell(self): + return self + + def write(self, file_path, driver="GTiff", dtype=None, nodata=None, **kwargs): + """Write method for a single RasterLayer. + + Parameters + ---------- + file_path : str (opt) + File path to save the dataset. + + driver : str + GDAL-compatible driver used for the file format. + + dtype : str (opt) + Numpy dtype used for the file. If omitted then the + RasterLayer's dtype is used. + + nodata : any number (opt) + A value used to represent the nodata pixels. If omitted + then the RasterLayer's nodata value is used (if assigned + already). + + kwargs : opt + Optional named arguments to pass to the format drivers. + For example can be `compress="deflate"` to add compression. 
+ + Returns + ------- + pyspatialml.RasterLayer + """ + if dtype is None: + dtype = self.dtype + + if nodata is None: + nodata = get_nodata_value(dtype) + + meta = self.ds.meta + meta["driver"] = driver + meta["nodata"] = nodata + meta["dtype"] = dtype + meta.update(kwargs) + + # mask any nodata values + arr = np.ma.masked_equal(self.read(), self.nodata) + arr = arr.filled(fill_value=nodata) + + # write to file + with rasterio.open(file_path, mode="w", **meta) as dst: + dst.write(arr.astype(dtype), 1) + + src = rasterio.open(file_path) + band = rasterio.band(src, 1) + layer = RasterLayer(band) + + return layer + + def _extract_by_indices(self, rows, cols): + """Spatial query of Raster object (by-band)""" + + X = np.ma.zeros((len(rows), self.count), dtype="float32") + arr = self.read(masked=True) + X[:, 0] = arr[rows, cols] + + return X diff --git a/pyspatialml/transformers.py b/pyspatialml/transformers.py index aa18a42..72ad19c 100644 --- a/pyspatialml/transformers.py +++ b/pyspatialml/transformers.py @@ -1,429 +1,429 @@ -import numpy as np -from scipy.spatial.distance import cdist -from scipy.spatial import cKDTree -from sklearn.base import BaseEstimator, TransformerMixin -from sklearn.neighbors import NearestNeighbors -from sklearn.preprocessing import Normalizer -from sklearn.utils.extmath import weighted_mode - - -class KNNTransformer(BaseEstimator, TransformerMixin): - """Transformer to generate new lag features by weighted aggregation - of K-neighboring observations. - - A lag transformer uses a weighted mean/mode of the values of the - K-neighboring observations to generate new lagged features. The - weighted mean/mode of the surrounding observations are appended - as a new feature to the right-most column in the training data. - - The K-neighboring observations are determined using the distance - metric specified in the `metric` argument. The default metric is - minkowski, and with p=2 is equivalent to the standard Euclidean - metric. 
- - Parameters - ---------- - n_neighbors : int, default = 7 - Number of neighbors to use by default for kneighbors queries. - - weights : {‘uniform’, ‘distance’} or callable, default=’distance’ - Weight function used in prediction. Possible values: - - - ‘uniform’ : uniform weights. All points in each - neighborhood are weighted equally. - - ‘distance’ : weight points by the inverse of their - distance. In this case, closer neighbors of a query - point will have a greater influence than neighbors - which are further away. - - [callable] : a user-defined function which accepts an - array of distances, and returns an array of the same - shape containing the weights. - - measure : {'mean', 'mode'} - Function that is used to apply the weights to `y`. Use 'mean' - if the target variable is continuous and 'mode' if the target - variable is discrete. - - radius : float, default=1.0 - Range of parameter space to use by default for radius_neighbors - queries. - - algorithm: {‘auto’, ‘ball_tree’, ‘kd_tree’, ‘brute’}, default=’auto’ - Algorithm used to compute the nearest neighbors: - - - ‘ball_tree’ will use BallTree - - ‘kd_tree’ will use KDTree - - ‘brute’ will use a brute-force search. - - ‘auto’ will attempt to decide the most appropriate - algorithm based on the values passed to fit method. - - Note: fitting on sparse input will override the setting - of this parameter, using brute force. - - leaf_size : int, default=30 - Leaf size passed to BallTree or KDTree. This can affect the - speed of the construction and query, as well as the memory - required to store the tree. The optimal value depends on the - nature of the problem. - - metric : str or callable, default=’minkowski’ - The distance metric to use for the tree. The default metric is - minkowski, and with p=2 is equivalent to the standard - Euclidean metric. See the documentation of DistanceMetric for - a list of available metrics. 
If metric is “precomputed”, X is - assumed to be a distance matrix and must be square during fit. - X may be a sparse graph, in which case only “nonzero” elements - may be considered neighbors. - - p : int, default=2 - Parameter for the Minkowski metric from - sklearn.metrics.pairwise.pairwise_distances. When p = 1, this - is equivalent to using manhattan_distance (l1), and - euclidean_distance (l2) for p = 2. For arbitrary p, - minkowski_distance (l_p) is used. - - normalize : bool, default=True - Whether to normalize the inputs using - sklearn.preprocessing.Normalizer - - metric_params : dict, default=None - Additional keyword arguments for the metric function. - - kernel_params : dict, default=None - Additional keyword arguments to pass to a custom kernel - function. - - n_jobs : int, default=None - The number of parallel jobs to run for neighbors search. None - means 1 unless in a joblib.parallel_backend context. -1 means - using all processors. See Glossary for more details. - """ - - def __init__( - self, - n_neighbors=7, - weights="distance", - measure="mean", - radius=1.0, - algorithm="auto", - leaf_size=30, - metric="minkowski", - p=2, - normalize=True, - metric_params=None, - kernel_params=None, - n_jobs=1, - ): - - self.n_neighbors = n_neighbors - self.weights = weights - self.measure = measure - self.radius = radius - self.algorithm = algorithm - self.leaf_size = leaf_size - self.metric = metric - self.p = p - self.metric_params = metric_params - self.kernel_params = kernel_params - self.normalize = normalize - self.n_jobs = n_jobs - - self.knn = NearestNeighbors( - n_neighbors=self.n_neighbors, - radius=self.radius, - algorithm=self.algorithm, - leaf_size=self.leaf_size, - metric=self.metric, - p=self.p, - metric_params=self.metric_params, - n_jobs=self.n_jobs, - ) - - self.y_ = None - - def fit(self, X, y=None): - """Fit the base_estimator with features from X - {n_samples, n_features} and with an additional spatially lagged - variable added to the 
right-most column of the training data. - - During fitting, the k-neighbors to each training point are - used to estimate the spatial lag component. The training point - is not included in the calculation, i.e. the training point is - not considered its own neighbor. - - Parameters - ---------- - X : array-like of sample {n_samples, n_features} using for model - fitting The training input samples - - y : array-like of shape (n_samples,) - The target values (class labels in classification, real - numbers in regression). - """ - # some checks - if self.kernel_params is None: - self.kernel_params = {} - - if y.ndim == 1: - self.n_outputs_ = 1 - else: - self.n_outputs_ = y.shape[1] - - # fit knn and get values of neighbors - if self.normalize is True: - scaler = Normalizer() - X = scaler.fit_transform(X) - self.scaler_ = scaler - - self.knn.fit(X) - self.y_ = y.copy() - - return self - - def transform(self, X, y=None): - """Transform method for spatial lag models. - - Augments new observations with a spatial lag variable created - from a weighted mean/mode (regression/classification) of - k-neighboring observations. - - Parameters - ---------- - X : array-like of sample {n_samples, n_features} - New samples for the prediction. - - y : None - Not used. 
- """ - # get distances from training points to new data - if self.normalize is True: - X = self.scaler_.transform(X) - - neighbor_dist, neighbor_ids = self.knn.kneighbors(X=X) - - # mask zero distances - neighbor_dist = np.ma.masked_equal(neighbor_dist, 0) - - # get values of closest training points to new data - neighbor_vals = np.array([self.y_[i] for i in neighbor_ids]) - - # mask neighbor values with zero distances - mask = neighbor_dist.mask - - if mask.all() == False: - mask = np.zeros(neighbor_dist.shape, dtype=bool) - mask[:] = False - - if neighbor_vals.ndim == 2: - neighbor_vals = np.ma.masked_array(neighbor_vals, mask) - else: - n_outputs = neighbor_vals.shape[2] - mask = np.repeat(mask[:, :, np.newaxis], n_outputs, axis=2) - neighbor_vals = np.ma.masked_array(neighbor_vals, mask=mask) - - # calculated weighted means - if self.weights == "distance": - new_X = self._distance_weighting(neighbor_vals, neighbor_dist) - - elif self.weights == "uniform": - new_X = self._uniform_weighting(neighbor_vals) - - elif callable(self.weights): - new_X = self._custom_weighting(neighbor_vals, neighbor_dist) - - return np.column_stack((X, new_X)) - - def _apply_weights(self, neighbor_vals, neighbor_weights): - # weighted mean/mode of neighbors for a single regression target - if neighbor_vals.ndim == 2: - if self.measure == "mean": - X = np.ma.average(neighbor_vals, weights=neighbor_weights, axis=1) - else: - X, _ = weighted_mode(neighbor_vals, neighbor_weights, axis=1) - - # weighted mean of neighbors for a multi-target regression - # neighbor_vals = (n_samples, n_neighbors, n_targets) - else: - X = np.zeros((neighbor_vals.shape[0], neighbor_vals.shape[2])) - - if self.measure == "mean": - for i in range(neighbor_vals.shape[-1]): - X[:, i] = np.ma.average( - neighbor_vals[:, :, i], weights=neighbor_weights, axis=1 - ) - else: - for i in range(neighbor_vals.shape[-1]): - X[:, i], _ = weighted_mode( - neighbor_vals[:, :, i], neighbor_weights, axis=1 - ) - - return X - - 
def _distance_weighting(self, neighbor_vals, neighbor_dist): - weights = 1 / neighbor_dist - return self._apply_weights(neighbor_vals, weights) - - def _uniform_weighting(self, neighbor_vals): - weights = np.ones((neighbor_vals.shape[0], neighbor_vals.shape[0])) - return self._apply_weights(neighbor_vals, weights) - - def _custom_weighting(self, neighbor_vals, neighbor_dist): - weights = self.weights(neighbor_dist, **self.kernel_params) - return self._apply_weights(neighbor_vals, weights) - - def _distance_weighting(self, neighbor_vals, neighbor_dist): - weights = 1 / neighbor_dist - return self._apply_weights(neighbor_vals, weights) - - def _uniform_weighting(self, neighbor_vals): - weights = np.ones((neighbor_vals.shape[0], neighbor_vals.shape[0])) - return self._apply_weights(neighbor_vals, weights) - - def _custom_weighting(self, neighbor_vals, neighbor_dist): - weights = self.weights(neighbor_dist, **self.kernel_params) - return self._apply_weights(neighbor_vals, weights) - - -class GeoDistTransformer(BaseEstimator, TransformerMixin): - """Transformer to add new features based on geographical distances - to reference locations. - - Parameters - ---------- - refs : ndarray - Array of coordinates of reference locations in - (m, n-dimensional) order, such as {n_locations, - x_coordinates, y_coordinates, ...} for as many dimensions as - required. For example to calculate distances to a single x,y,z - location: - - refs = [-57.345, -110.134, 1012] - - And to calculate distances to three x,y reference locations: - - refs = [ - [-57.345, -110.134], - [-56.345, -109.123], - [-58.534, -112.123] - ] - - The supplied array has to have at least x,y coordinates with a - (1, 2) shape for a single location. - - minimum : bool, default is False - Optionally calculate the minimum distance to the combined - reference locations, resulting in a single new feature, - rather than a new feature for each individual reference - location. 
- - log : bool (opt), default=False - Optionally log-transform the distance measures. - - Returns - ------- - X_new : ndarray - Array of shape (n_samples, n_features) with new geodistance - features appended to the right-most columns of the array. - """ - - def __init__(self, refs, minimum=False, log=False): - self.refs = refs - self.log = log - self.refs_ = None - self.minimum = minimum - - def fit(self, X, y=None): - self.refs_ = np.asarray(self.refs) - - if self.refs_.ndim < 2: - raise ValueError( - "`refs` has to be a m,n-dimensional array with at least two dimensions" - ) - - return self - - def transform(self, X, y=None): - if self.minimum is False: - dists = cdist(self.refs_, X).transpose() - - if self.minimum is True: - tree = cKDTree(self.refs_) - dists, _ = tree.query(X) - - if self.log is True: - dists = np.log(dists) - - return np.column_stack((X, dists)) - - -class AspectTransformer(BaseEstimator, TransformerMixin): - """Transformer to decompose aspect maps into northerness and easterness""" - - def __init__(self): - self._quadrants = None - self._reverse = np.array([0.0, 360.0]) - - def _dir_from_comp(self, X): - return np.rad2deg(np.arctan2(X[:, 1], X[:, 0])) - - def fit(self, X, y=None): - return self - - def fit_transform(self, X, y=None): - return self.transform(X, y) - - def transform(self, X, y=None): - """Takes a vector of floating point numbers, assumed to be aspect in degrees - and returns an array with two dimensions with the strength of the easterly and - northernly direction - - Parameters - ---------- - X : array-like of sample {n_samples, n_features} - New samples for the prediction. - - y : None - Not used. - - Returns - ------- - ndarray : 2d array with the sin and cosine components of the original - data. 
- """ - # ensure that X is a ndarray - if isinstance(X, list): - X = np.asarray(X) - - # store quadrant so that inverse can be found - condlist = [np.logical_and(X >= 0, X <= 180), np.logical_and(X > 180, X < 360)] - choicelist = np.arange(0, 2) - self._quadrants = np.select(condlist, choicelist) - - radians = np.deg2rad(X) - easterness = np.cos(radians) - northerness = np.sin(radians) - - return np.column_stack([easterness, northerness]) - - def inverse_transform(self, X, y=None): - """Takes a vector with two columns containing the strength of the easterly and - northernly directions and returns a array with a single dimension with the - azimuth in degrees - - Because the quadrant is not known, this function only returns the azimuth - relative to a northernly direction - - Parameters - ---------- - X : array-like of sample {n_samples, n_features} - New samples for the prediction. - - y : None - Not used. - - Returns - ------- - ndarray : 2d array with the azimuth. - """ - inverse = self._dir_from_comp(X) - inverse = np.abs(inverse + self._reverse[self._quadrants]) - return inverse +import numpy as np +from scipy.spatial.distance import cdist +from scipy.spatial import cKDTree +from sklearn.base import BaseEstimator, TransformerMixin +from sklearn.neighbors import NearestNeighbors +from sklearn.preprocessing import Normalizer +from sklearn.utils.extmath import weighted_mode + + +class KNNTransformer(BaseEstimator, TransformerMixin): + """Transformer to generate new lag features by weighted aggregation + of K-neighboring observations. + + A lag transformer uses a weighted mean/mode of the values of the + K-neighboring observations to generate new lagged features. The + weighted mean/mode of the surrounding observations are appended + as a new feature to the right-most column in the training data. + + The K-neighboring observations are determined using the distance + metric specified in the `metric` argument. 
The default metric is + minkowski, and with p=2 is equivalent to the standard Euclidean + metric. + + Parameters + ---------- + n_neighbors : int, default = 7 + Number of neighbors to use by default for kneighbors queries. + + weights : {‘uniform’, ‘distance’} or callable, default=’distance’ + Weight function used in prediction. Possible values: + + - ‘uniform’ : uniform weights. All points in each + neighborhood are weighted equally. + - ‘distance’ : weight points by the inverse of their + distance. In this case, closer neighbors of a query + point will have a greater influence than neighbors + which are further away. + - [callable] : a user-defined function which accepts an + array of distances, and returns an array of the same + shape containing the weights. + + measure : {'mean', 'mode'} + Function that is used to apply the weights to `y`. Use 'mean' + if the target variable is continuous and 'mode' if the target + variable is discrete. + + radius : float, default=1.0 + Range of parameter space to use by default for radius_neighbors + queries. + + algorithm: {‘auto’, ‘ball_tree’, ‘kd_tree’, ‘brute’}, default=’auto’ + Algorithm used to compute the nearest neighbors: + + - ‘ball_tree’ will use BallTree + - ‘kd_tree’ will use KDTree + - ‘brute’ will use a brute-force search. + - ‘auto’ will attempt to decide the most appropriate + algorithm based on the values passed to fit method. + - Note: fitting on sparse input will override the setting + of this parameter, using brute force. + + leaf_size : int, default=30 + Leaf size passed to BallTree or KDTree. This can affect the + speed of the construction and query, as well as the memory + required to store the tree. The optimal value depends on the + nature of the problem. + + metric : str or callable, default=’minkowski’ + The distance metric to use for the tree. The default metric is + minkowski, and with p=2 is equivalent to the standard + Euclidean metric. 
See the documentation of DistanceMetric for + a list of available metrics. If metric is “precomputed”, X is + assumed to be a distance matrix and must be square during fit. + X may be a sparse graph, in which case only “nonzero” elements + may be considered neighbors. + + p : int, default=2 + Parameter for the Minkowski metric from + sklearn.metrics.pairwise.pairwise_distances. When p = 1, this + is equivalent to using manhattan_distance (l1), and + euclidean_distance (l2) for p = 2. For arbitrary p, + minkowski_distance (l_p) is used. + + normalize : bool, default=True + Whether to normalize the inputs using + sklearn.preprocessing.Normalizer + + metric_params : dict, default=None + Additional keyword arguments for the metric function. + + kernel_params : dict, default=None + Additional keyword arguments to pass to a custom kernel + function. + + n_jobs : int, default=None + The number of parallel jobs to run for neighbors search. None + means 1 unless in a joblib.parallel_backend context. -1 means + using all processors. See Glossary for more details. 
+ """ + + def __init__( + self, + n_neighbors=7, + weights="distance", + measure="mean", + radius=1.0, + algorithm="auto", + leaf_size=30, + metric="minkowski", + p=2, + normalize=True, + metric_params=None, + kernel_params=None, + n_jobs=1, + ): + + self.n_neighbors = n_neighbors + self.weights = weights + self.measure = measure + self.radius = radius + self.algorithm = algorithm + self.leaf_size = leaf_size + self.metric = metric + self.p = p + self.metric_params = metric_params + self.kernel_params = kernel_params + self.normalize = normalize + self.n_jobs = n_jobs + + self.knn = NearestNeighbors( + n_neighbors=self.n_neighbors, + radius=self.radius, + algorithm=self.algorithm, + leaf_size=self.leaf_size, + metric=self.metric, + p=self.p, + metric_params=self.metric_params, + n_jobs=self.n_jobs, + ) + + self.y_ = None + + def fit(self, X, y=None): + """Fit the base_estimator with features from X + {n_samples, n_features} and with an additional spatially lagged + variable added to the right-most column of the training data. + + During fitting, the k-neighbors to each training point are + used to estimate the spatial lag component. The training point + is not included in the calculation, i.e. the training point is + not considered its own neighbor. + + Parameters + ---------- + X : array-like of sample {n_samples, n_features} using for model + fitting The training input samples + + y : array-like of shape (n_samples,) + The target values (class labels in classification, real + numbers in regression). + """ + # some checks + if self.kernel_params is None: + self.kernel_params = {} + + if y.ndim == 1: + self.n_outputs_ = 1 + else: + self.n_outputs_ = y.shape[1] + + # fit knn and get values of neighbors + if self.normalize is True: + scaler = Normalizer() + X = scaler.fit_transform(X) + self.scaler_ = scaler + + self.knn.fit(X) + self.y_ = y.copy() + + return self + + def transform(self, X, y=None): + """Transform method for spatial lag models. 
+ + Augments new observations with a spatial lag variable created + from a weighted mean/mode (regression/classification) of + k-neighboring observations. + + Parameters + ---------- + X : array-like of sample {n_samples, n_features} + New samples for the prediction. + + y : None + Not used. + """ + # get distances from training points to new data + if self.normalize is True: + X = self.scaler_.transform(X) + + neighbor_dist, neighbor_ids = self.knn.kneighbors(X=X) + + # mask zero distances + neighbor_dist = np.ma.masked_equal(neighbor_dist, 0) + + # get values of closest training points to new data + neighbor_vals = np.array([self.y_[i] for i in neighbor_ids]) + + # mask neighbor values with zero distances + mask = neighbor_dist.mask + + if mask.all() == False: + mask = np.zeros(neighbor_dist.shape, dtype=bool) + mask[:] = False + + if neighbor_vals.ndim == 2: + neighbor_vals = np.ma.masked_array(neighbor_vals, mask) + else: + n_outputs = neighbor_vals.shape[2] + mask = np.repeat(mask[:, :, np.newaxis], n_outputs, axis=2) + neighbor_vals = np.ma.masked_array(neighbor_vals, mask=mask) + + # calculated weighted means + if self.weights == "distance": + new_X = self._distance_weighting(neighbor_vals, neighbor_dist) + + elif self.weights == "uniform": + new_X = self._uniform_weighting(neighbor_vals) + + elif callable(self.weights): + new_X = self._custom_weighting(neighbor_vals, neighbor_dist) + + return np.column_stack((X, new_X)) + + def _apply_weights(self, neighbor_vals, neighbor_weights): + # weighted mean/mode of neighbors for a single regression target + if neighbor_vals.ndim == 2: + if self.measure == "mean": + X = np.ma.average(neighbor_vals, weights=neighbor_weights, axis=1) + else: + X, _ = weighted_mode(neighbor_vals, neighbor_weights, axis=1) + + # weighted mean of neighbors for a multi-target regression + # neighbor_vals = (n_samples, n_neighbors, n_targets) + else: + X = np.zeros((neighbor_vals.shape[0], neighbor_vals.shape[2])) + + if self.measure == 
"mean": + for i in range(neighbor_vals.shape[-1]): + X[:, i] = np.ma.average( + neighbor_vals[:, :, i], weights=neighbor_weights, axis=1 + ) + else: + for i in range(neighbor_vals.shape[-1]): + X[:, i], _ = weighted_mode( + neighbor_vals[:, :, i], neighbor_weights, axis=1 + ) + + return X + + def _distance_weighting(self, neighbor_vals, neighbor_dist): + weights = 1 / neighbor_dist + return self._apply_weights(neighbor_vals, weights) + + def _uniform_weighting(self, neighbor_vals): + weights = np.ones((neighbor_vals.shape[0], neighbor_vals.shape[0])) + return self._apply_weights(neighbor_vals, weights) + + def _custom_weighting(self, neighbor_vals, neighbor_dist): + weights = self.weights(neighbor_dist, **self.kernel_params) + return self._apply_weights(neighbor_vals, weights) + + def _distance_weighting(self, neighbor_vals, neighbor_dist): + weights = 1 / neighbor_dist + return self._apply_weights(neighbor_vals, weights) + + def _uniform_weighting(self, neighbor_vals): + weights = np.ones((neighbor_vals.shape[0], neighbor_vals.shape[0])) + return self._apply_weights(neighbor_vals, weights) + + def _custom_weighting(self, neighbor_vals, neighbor_dist): + weights = self.weights(neighbor_dist, **self.kernel_params) + return self._apply_weights(neighbor_vals, weights) + + +class GeoDistTransformer(BaseEstimator, TransformerMixin): + """Transformer to add new features based on geographical distances + to reference locations. + + Parameters + ---------- + refs : ndarray + Array of coordinates of reference locations in + (m, n-dimensional) order, such as {n_locations, + x_coordinates, y_coordinates, ...} for as many dimensions as + required. 
For example to calculate distances to a single x,y,z + location: + + refs = [-57.345, -110.134, 1012] + + And to calculate distances to three x,y reference locations: + + refs = [ + [-57.345, -110.134], + [-56.345, -109.123], + [-58.534, -112.123] + ] + + The supplied array has to have at least x,y coordinates with a + (1, 2) shape for a single location. + + minimum : bool, default is False + Optionally calculate the minimum distance to the combined + reference locations, resulting in a single new feature, + rather than a new feature for each individual reference + location. + + log : bool (opt), default=False + Optionally log-transform the distance measures. + + Returns + ------- + X_new : ndarray + Array of shape (n_samples, n_features) with new geodistance + features appended to the right-most columns of the array. + """ + + def __init__(self, refs, minimum=False, log=False): + self.refs = refs + self.log = log + self.refs_ = None + self.minimum = minimum + + def fit(self, X, y=None): + self.refs_ = np.asarray(self.refs) + + if self.refs_.ndim < 2: + raise ValueError( + "`refs` has to be a m,n-dimensional array with at least two dimensions" + ) + + return self + + def transform(self, X, y=None): + if self.minimum is False: + dists = cdist(self.refs_, X).transpose() + + if self.minimum is True: + tree = cKDTree(self.refs_) + dists, _ = tree.query(X) + + if self.log is True: + dists = np.log(dists) + + return np.column_stack((X, dists)) + + +class AspectTransformer(BaseEstimator, TransformerMixin): + """Transformer to decompose aspect maps into northerness and easterness""" + + def __init__(self): + self._quadrants = None + self._reverse = np.array([0.0, 360.0]) + + def _dir_from_comp(self, X): + return np.rad2deg(np.arctan2(X[:, 1], X[:, 0])) + + def fit(self, X, y=None): + return self + + def fit_transform(self, X, y=None): + return self.transform(X, y) + + def transform(self, X, y=None): + """Takes a vector of floating point numbers, assumed to be aspect in 
degrees + and returns an array with two dimensions with the strength of the easterly and + northernly direction + + Parameters + ---------- + X : array-like of sample {n_samples, n_features} + New samples for the prediction. + + y : None + Not used. + + Returns + ------- + ndarray : 2d array with the sin and cosine components of the original + data. + """ + # ensure that X is a ndarray + if isinstance(X, list): + X = np.asarray(X) + + # store quadrant so that inverse can be found + condlist = [np.logical_and(X >= 0, X <= 180), np.logical_and(X > 180, X < 360)] + choicelist = np.arange(0, 2) + self._quadrants = np.select(condlist, choicelist) + + radians = np.deg2rad(X) + easterness = np.cos(radians) + northerness = np.sin(radians) + + return np.column_stack([easterness, northerness]) + + def inverse_transform(self, X, y=None): + """Takes a vector with two columns containing the strength of the easterly and + northernly directions and returns a array with a single dimension with the + azimuth in degrees + + Because the quadrant is not known, this function only returns the azimuth + relative to a northernly direction + + Parameters + ---------- + X : array-like of sample {n_samples, n_features} + New samples for the prediction. + + y : None + Not used. + + Returns + ------- + ndarray : 2d array with the azimuth. + """ + inverse = self._dir_from_comp(X) + inverse = np.abs(inverse + self._reverse[self._quadrants]) + return inverse diff --git a/pyspatialml/vector.py b/pyspatialml/vector.py index c2055d0..2dfa1de 100644 --- a/pyspatialml/vector.py +++ b/pyspatialml/vector.py @@ -1,61 +1,61 @@ -import random - -from scipy.cluster.hierarchy import cut_tree, linkage -from shapely.geometry import Point - - -def filter_points(gdf, min_dist=0, remove="first"): - """Filter points in geodataframe using a minimum distance buffer. - - Parameters - ---------- - gdf : Geopandas GeoDataFrame - Containing point geometries. 
- - min_dist : int or float, optional (default=0) - Minimum distance by which to filter out closely spaced points. - - remove : str, optional (default='first') - Optionally choose to remove 'first' occurrences or 'last' - occurrences. - - Returns - ------- - xy : 2d array-like - Numpy array filtered coordinates - """ - xy = gdf.geometry.bounds.iloc[:, 0:2] - - Z = linkage(xy, "complete") - tree_thres = cut_tree(Z, height=min_dist) - gdf["tree_thres"] = tree_thres - - if remove == "first": - gdf = gdf.groupby(by="tree_thres").first() - - elif remove == "last": - gdf = gdf.groupby(by="tree_thres").last() - - return gdf - - -def get_random_point_in_polygon(poly): - """Generates random shapely Point geometry objects within a single - shapely Polygon object. - - Parameters - ---------- - poly : Shapely Polygon object - - Returns - ------- - p : Shapely Point object - """ - - (minx, miny, maxx, maxy) = poly.bounds - - while True: - p = Point(random.uniform(minx, maxx), random.uniform(miny, maxy)) - - if poly.contains(p): - return p +import random + +from scipy.cluster.hierarchy import cut_tree, linkage +from shapely.geometry import Point + + +def filter_points(gdf, min_dist=0, remove="first"): + """Filter points in geodataframe using a minimum distance buffer. + + Parameters + ---------- + gdf : Geopandas GeoDataFrame + Containing point geometries. + + min_dist : int or float, optional (default=0) + Minimum distance by which to filter out closely spaced points. + + remove : str, optional (default='first') + Optionally choose to remove 'first' occurrences or 'last' + occurrences. 
+ + Returns + ------- + xy : 2d array-like + Numpy array filtered coordinates + """ + xy = gdf.geometry.bounds.iloc[:, 0:2] + + Z = linkage(xy, "complete") + tree_thres = cut_tree(Z, height=min_dist) + gdf["tree_thres"] = tree_thres + + if remove == "first": + gdf = gdf.groupby(by="tree_thres").first() + + elif remove == "last": + gdf = gdf.groupby(by="tree_thres").last() + + return gdf + + +def get_random_point_in_polygon(poly): + """Generates random shapely Point geometry objects within a single + shapely Polygon object. + + Parameters + ---------- + poly : Shapely Polygon object + + Returns + ------- + p : Shapely Point object + """ + + (minx, miny, maxx, maxy) = poly.bounds + + while True: + p = Point(random.uniform(minx, maxx), random.uniform(miny, maxy)) + + if poly.contains(p): + return p diff --git a/reference/Raster.qmd b/reference/Raster.qmd index 167b8cb..9fca366 100644 --- a/reference/Raster.qmd +++ b/reference/Raster.qmd @@ -1,613 +1,613 @@ -# Raster { #pyspatialml.Raster } - -`Raster(self, src, crs=None, transform=None, nodata=None, file_path=None, driver=None, tempdir=tempfile.tempdir, in_memory=False)` - -Creates a collection of file-based GDAL-supported raster -datasets that share a common coordinate reference system and -geometry. - -Raster objects encapsulate RasterLayer objects, which represent -single band raster datasets that can physically be represented by -either separate single-band raster files, multi-band raster files, -or any combination of individual bands from multi-band raster and -single-band raster datasets. 
- -## Attributes - -| Name | Type | Description | -|-------------|--------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| files | list | A list of the raster dataset files that are used in the Raster. This does not have to be the same length as the number of RasterLayers because some files may have multiple bands. | -| meta | dict | A dict containing the raster metadata. The dict contains the following keys/values: crs : the crs object transform : the Affine.affine transform object width : width of the Raster in pixels height : height of the Raster in pixels count : number of RasterLayers within the Raster dtype : the numpy datatype that represents lowest common denominator of the different dtypes for all of the layers in the Raster. | -| names | list | A list of the RasterLayer names. | -| block_shape | tuple | The default block_shape in (rows, cols) for reading windows of data in the Raster for out-of-memory processing. | - -## Methods - -| Name | Description | -| --- | --- | -| [aggregate](#pyspatialml.Raster.aggregate) | Aggregates a raster to (usually) a coarser grid cell size. | -| [alter](#pyspatialml.Raster.alter) | Apply a fitted scikit-learn transformer to a Raster object. | -| [append](#pyspatialml.Raster.append) | Method to add new RasterLayers to a Raster object. | -| [apply](#pyspatialml.Raster.apply) | Apply user-supplied function to a Raster object. | -| [block_shapes](#pyspatialml.Raster.block_shapes) | Generator for windows for optimal reading and writing based | -| [close](#pyspatialml.Raster.close) | Close all of the RasterLayer objects in the Raster. 
| -| [copy](#pyspatialml.Raster.copy) | Creates a shallow copy of a Raster object | -| [crop](#pyspatialml.Raster.crop) | Crops a Raster object by the supplied bounds. | -| [drop](#pyspatialml.Raster.drop) | Drop individual RasterLayers from a Raster object | -| [extract_raster](#pyspatialml.Raster.extract_raster) | Sample a Raster object by an aligned raster of labelled pixels. | -| [extract_vector](#pyspatialml.Raster.extract_vector) | Sample a Raster/RasterLayer using a geopandas GeoDataframe | -| [extract_xy](#pyspatialml.Raster.extract_xy) | Samples pixel values using an array of xy locations. | -| [head](#pyspatialml.Raster.head) | Return the first 10 rows from the Raster as a ndarray | -| [intersect](#pyspatialml.Raster.intersect) | Perform a intersect operation on the Raster object. | -| [mask](#pyspatialml.Raster.mask) | Mask a Raster object based on the outline of shapes in a | -| [predict](#pyspatialml.Raster.predict) | Apply prediction of a scikit learn model to a Raster. | -| [predict_proba](#pyspatialml.Raster.predict_proba) | Apply class probability prediction of a scikit learn model to a Raster. | -| [read](#pyspatialml.Raster.read) | Reads data from the Raster object into a numpy array. | -| [rename](#pyspatialml.Raster.rename) | Rename a RasterLayer within the Raster object. | -| [sample](#pyspatialml.Raster.sample) | Generates a random sample of according to size, and samples | -| [scale](#pyspatialml.Raster.scale) | Standardize (centre and scale) a Raster object by | -| [set_block_shape](#pyspatialml.Raster.set_block_shape) | Set the block shape of the raster, i.e. the height and width | -| [tail](#pyspatialml.Raster.tail) | Return the last 10 rows from the Raster as a ndarray | -| [to_crs](#pyspatialml.Raster.to_crs) | Reprojects a Raster object to a different crs. | -| [to_pandas](#pyspatialml.Raster.to_pandas) | Raster to pandas DataFrame. | -| [write](#pyspatialml.Raster.write) | Write the Raster object to a file. 
| - -### aggregate { #pyspatialml.Raster.aggregate } - -`Raster.aggregate(out_shape, resampling='nearest', file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, **kwargs)` - -Aggregates a raster to (usually) a coarser grid cell size. - -#### Parameters - -| Name | Type | Description | Default | -|--------------|-------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| -| `out_shape` | tuple | New shape in (rows, cols). | _required_ | -| `resampling` | str (default 'nearest') | Resampling method to use when applying decimated reads when out_shape is specified. Supported methods are: 'average', 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', 'max', 'med', 'min', 'mode', 'q1', 'q3'. | `'nearest'` | -| `file_path` | str (optional | File path to save to cropped raster. If not supplied then the aggregated raster is saved to a temporary file. | `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | -| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | -| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. If not specified then the new intersected Raster is created using the dtype of the existing Raster dataset, which uses a dtype that can accommodate the data types of all of the individual RasterLayers. | `None)` | -| `nodata` | any number (optional | Nodata value for new dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's dtype. Note that this does not change the pixel nodata values of the raster, it only changes the metadata of what value represents a nodata pixel. 
| `None)` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | - -#### Returns - -| Type | Description | -|---------------------------|-----------------------------------------------| -| pyspatialml.raster.Raster | Raster object aggregated to a new pixel size. | - -### alter { #pyspatialml.Raster.alter } - -`Raster.alter(transformer, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, progress=False)` - -Apply a fitted scikit-learn transformer to a Raster object. - -Can be used to transform a raster using methods such as StandardScaler, -RobustScaler etc. - -#### Parameters - -| Name | Type | Description | Default | -|---------------|--------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `transformer` | a sklearn.preprocessing.Transformer object | | _required_ | -| `file_path` | str (optional | Path to a GeoTiff raster for the prediction results. If not specified then the output is written to a temporary file. | `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | -| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | -| `dtype` | str (optional | Optionally specify a GDAL compatible data type when saving to file. If not specified, a data type is set based on the data type of the prediction. | `None)` | -| `nodata` | any number (optional | Nodata value for file export. If not specified then the nodata value is derived from the minimum permissible value for the given data type. | `None)` | -| `progress` | bool (default False) | Show progress bar for operation. 
| `False` | - -#### Returns - -| Type | Description | -|--------------------------------------------------|---------------| -| Pyspatialml.Raster object with transformed data. | | - -### append { #pyspatialml.Raster.append } - -`Raster.append(other, in_place=False)` - -Method to add new RasterLayers to a Raster object. - -Note that this modifies the Raster object in-place by default. - -#### Parameters - -| Name | Type | Description | Default | -|------------|------------------------------------------|------------------------------------------------------------------------------------------------|------------| -| `other` | Raster object, or list of Raster objects | Object to append to the Raster. | _required_ | -| `in_place` | bool (default False) | Whether to change the Raster object in-place or leave original and return a new Raster object. | `False` | - -#### Returns - -| Type | Description | -|---------------------------|--------------------------------------| -| pyspatialml.raster.Raster | Returned only if `in_place` is False | - -### apply { #pyspatialml.Raster.apply } - -`Raster.apply(function, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, progress=False, function_args={}, **kwargs)` - -Apply user-supplied function to a Raster object. - -#### Parameters - -| Name | Type | Description | Default | -|-----------------|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `function` | function | Function that takes an numpy array as a single argument. | _required_ | -| `file_path` | str (optional | Optional path to save calculated Raster object. If not specified then a tempfile is used. | `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. 
| `is False` | -| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | -| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. If not specified then the new Raster is created using the dtype of the calculation result. | `None)` | -| `nodata` | any number (optional | Nodata value for new dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's data type. Note that this changes the values of the pixels that represent nodata pixels. | `None)` | -| `progress` | bool (default False) | Optionally show progress of transform operations. | `False` | -| `function_args` | dict(optional) | Optionally pass arguments to the `function` as a dict or keyword arguments. | `{}` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | - -#### Returns - -| Type | Description | -|---------------------------|------------------------------------------| -| pyspatialml.raster.Raster | Raster containing the calculated result. | - -### block_shapes { #pyspatialml.Raster.block_shapes } - -`Raster.block_shapes(rows, cols)` - -Generator for windows for optimal reading and writing based -on the raster format Windows and returns as a tuple with xoff, -yoff, width, height. - -#### Parameters - -| Name | Type | Description | Default | -|--------|--------|-----------------------------|------------| -| `rows` | int | Height of window in rows. | _required_ | -| `cols` | int | Width of window in columns. | _required_ | - -### close { #pyspatialml.Raster.close } - -`Raster.close()` - -Close all of the RasterLayer objects in the Raster. - -Note that this will cause any rasters based on temporary files -to be removed. This is intended as a method of clearing -temporary files that may have accumulated during an analysis -session. 
- -### copy { #pyspatialml.Raster.copy } - -`Raster.copy(subset=None)` - -Creates a shallow copy of a Raster object - -Note that shallow in the context of a Raster object means that -an immutable copy of the object is made, however the on-disk and -in-memory file locations remain the same. - -#### Parameters - -| Name | Type | Description | Default | -|----------|--------|------------------------------------------------|-----------| -| `subset` | opt | A list of layer names to subset while copying. | `None` | - -#### Returns - -| Type | Description | -|---------------------------|---------------| -| pyspatialml.raster.Raster | | - -### crop { #pyspatialml.Raster.crop } - -`Raster.crop(bounds, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, **kwargs)` - -Crops a Raster object by the supplied bounds. - -#### Parameters - -| Name | Type | Description | Default | -|-------------|-------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `bounds` | tuple | A tuple containing the bounding box to clip by in the form of (xmin, ymin, xmax, ymax). | _required_ | -| `file_path` | str (optional | File path to save to cropped raster. If not supplied then the cropped raster is saved to a temporary file. | `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | -| `driver` | str (default 'GTiff'). Default is 'GTiff' | Named of GDAL-supported driver for file export. | `'GTiff'` | -| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. 
If not specified then the new intersected Raster is created using the dtype of theexisting Raster dataset, which uses a dtype that can accommodate the data types of all of the individual RasterLayers. | `None)` | -| `nodata` | any number (optional | Nodata value for new dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's data type. Note that this does not change the pixel nodata values of the raster, it only changes the metadata of what value represents a nodata pixel. | `None)` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | - -#### Returns - -| Type | Description | -|---------------------------|-------------------------------| -| pyspatialml.raster.Raster | Raster cropped to new extent. | - -### drop { #pyspatialml.Raster.drop } - -`Raster.drop(labels, in_place=False)` - -Drop individual RasterLayers from a Raster object - -Note that this modifies the Raster object in-place by default. - -#### Parameters - -| Name | Type | Description | Default | -|------------|---------------------------|-------------------------------------------------------------------------------------------------------|------------| -| `labels` | single label or list-like | Index (int) or layer name to drop. Can be a single integer or label, or a list of integers or labels. | _required_ | -| `in_place` | bool (default False) | Whether to change the Raster object in-place or leave original and return a new Raster object. | `False` | - -#### Returns - -| Type | Description | -|--------------------------------|-------------------------------------| -| pyspatialml.pyspatialml.Raster | Returned only if `in_place` is True | - -### extract_raster { #pyspatialml.Raster.extract_raster } - -`Raster.extract_raster(src, progress=False)` - -Sample a Raster object by an aligned raster of labelled pixels. 
- -#### Parameters - -| Name | Type | Description | Default | -|------------|-----------|-----------------------------------------------------------------------------------------|------------| -| `src` | | Single band raster containing labelled pixels as an open rasterio DatasetReader object. | _required_ | -| `progress` | bool(opt) | Show a progress bar for extraction. | `False` | - -#### Returns - -| Type | Description | -|----------------------------------|----------------------------------------------------------------------------------| -| geopandas.geopandas.GeoDataFrame | Geodataframe containing extracted data as point features if `return_array=False` | - -### extract_vector { #pyspatialml.Raster.extract_vector } - -`Raster.extract_vector(gdf, progress=False)` - -Sample a Raster/RasterLayer using a geopandas GeoDataframe -containing points, lines or polygon features. - -#### Parameters - -| Name | Type | Description | Default | -|------------|-----------|-----------------------------------------------------------------------------------------------------------------------|------------| -| `gdf` | | Containing either point, line or polygon geometries. Overlapping geometries will cause the same pixels to be sampled. | _required_ | -| `progress` | bool(opt) | Show a progress bar for extraction. 
| `False` | - -#### Returns - -| Type | Description | -|----------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| geopandas.geopandas.GeoDataframe | Containing extracted data as point geometries (one point per pixel). The resulting GeoDataFrame is indexed using a named pandas.MultiIndex, with `pixel_idx` index referring to the index of each pixel that was sampled, and the `geometry_idx` index referring to the index of the each geometry in the supplied `gdf`. This makes it possible to keep track of how sampled pixel relates to the original geometries, i.e. multiple pixels being extracted within the area of a single polygon that can be referred to using the `geometry_idx`. 
The extracted data can subsequently be joined with the attribute table of the supplied `gdf` using: training_py = geopandas.read_file(nc.polygons) df = self.stack.extract_vector(gdf=training_py) df = df.dropna() df = df.merge( right=training_py.loc[:, ("id", "label")], left_on="polygon_idx", right_on="id", right_index=True ) | - -### extract_xy { #pyspatialml.Raster.extract_xy } - -`Raster.extract_xy(xys, return_array=False, progress=False)` - -Samples pixel values using an array of xy locations. - -#### Parameters - -| Name | Type | Description | Default | -|----------------|---------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `xys` | 2d array-like | x and y coordinates from which to sample the raster (n_samples, xys). | _required_ | -| `return_array` | bool(opt) | By default the extracted pixel values are returned as a geopandas.GeoDataFrame. If `return_array=True` then the extracted pixel values are returned as a tuple of numpy.ndarrays. | `False` | -| `progress` | bool(opt) | Show a progress bar for extraction. | `False` | - -#### Returns - -| Type | Description | -|----------------------------------|----------------------------------------------------------------------------------------| -| geopandas.geopandas.GeoDataframe | Containing extracted data as point geometries if `return_array=False`. | -| numpy.numpy.ndarray | 2d masked array containing sampled raster values (sample, bands) at the x,y locations. | - -### head { #pyspatialml.Raster.head } - -`Raster.head()` - -Return the first 10 rows from the Raster as a ndarray - -### intersect { #pyspatialml.Raster.intersect } - -`Raster.intersect(file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, **kwargs)` - -Perform a intersect operation on the Raster object. 
- -Computes the geometric intersection of the RasterLayers with -the Raster object. This will cause nodata values in any of -the rasters to be propagated through all of the output rasters. - -#### Parameters - -| Name | Type | Description | Default | -|-------------|-----------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `file_path` | str (optional | File path to save to resulting Raster. If not supplied then the resulting Raster is saved to a temporary file. | `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | -| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | -| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. If not specified then the new intersected Raster is created using the dtype of the existing Raster dataset, which uses a dtype that can accommodate the data types of all of the individual RasterLayers. | `None)` | -| `nodata` | any number (optional | Nodata value for new dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's data type. Note that this changes the values of the pixels that represent nodata to the new value. | `None)` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | - -#### Returns - -| Type | Description | -|---------------------------|------------------------------------------------------------------------------------------------| -| pyspatialml.raster.Raster | Raster with layers that are masked based on a union of all masks in the suite of RasterLayers. 
| - -### mask { #pyspatialml.Raster.mask } - -`Raster.mask(shapes, invert=False, crop=True, pad=False, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, **kwargs)` - -Mask a Raster object based on the outline of shapes in a -geopandas.GeoDataFrame - -#### Parameters - -| Name | Type | Description | Default | -|-------------|----------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `shapes` | geopandas.geopandas.GeoDataFrame | GeoDataFrame containing masking features. | _required_ | -| `invert` | bool (default False) | If False then pixels outside shapes will be masked. If True then pixels inside shape will be masked. | `False` | -| `crop` | bool (default True) | Crop the raster to the extent of the shapes. | `True` | -| `pad` | bool (default False) | If True, the features will be padded in each direction by one half of a pixel prior to cropping raster. | `False` | -| `file_path` | str (optional | File path to save to resulting Raster. If not supplied then the resulting Raster is saved to a temporary file. | `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | -| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | -| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. If not specified then the cropped Raster is created using the existing dtype, which usesa dtype that can accommodate the data types of all of the individual RasterLayers. | `None)` | -| `nodata` | any number (optional | Nodata value for cropped dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's data type. 
Note that this changes the values of the pixels to the new nodata value, and changes the metadata of the raster. | `None)` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | - -#### Returns - -| Type | Description | -|--------------------------------|----------------------------| -| pyspatialml.pyspatialml.Raster | Raster with masked layers. | - -### predict { #pyspatialml.Raster.predict } - -`Raster.predict(estimator, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, progress=False, constants=None, **kwargs)` - -Apply prediction of a scikit learn model to a Raster. - -The model can represent any scikit learn model or compatible -api with a `fit` and `predict` method. These can consist of -classification or regression models. Multi-class -classifications and multi-target regressions are also -supported. - -#### Parameters - -| Name | Type | Description | Default | -|-------------|-------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `estimator` | estimator object implementing 'fit' | The object to use to fit the data. | _required_ | -| `file_path` | str (optional | Path to a GeoTiff raster for the prediction results. If not specified then the output is written to a temporary file. 
| `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | -| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export | `'GTiff'` | -| `dtype` | str (optional | Optionally specify a GDAL compatible data type when saving to file. If not specified, np.float32 is assumed. | `None)` | -| `nodata` | any number (optional | Nodata value for file export. If not specified then the nodata value is derived from the minimum permissible value for the given data type. | `None)` | -| `progress` | bool (default False) | Show progress bar for prediction. | `False` | -| `constants` | | Constant features to add to the Raster object with each value in a list or 1d ndarray representing an additional feature. If a list-like object of values os passed, then each numeric value will be appended as constant features to the last columns in the data. It is therefore important that all features including constant features are present in the same order as what was used to train the model. If a dict is passed, then the keys of the dict must refer to the names of raster layers in the Raster object. In this case, the values of the dict will replace the values of the raster layers in the Raster object. | `None` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | - -#### Returns - -| Type | Description | -|---------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| pyspatialml.raster.Raster | Raster object containing prediction results as a RasterLayers. 
For classification and regression models, the Raster will contain a single RasterLayer, unless the model is multi-class or multi-target. Layers are named automatically as `pred_raw_n` with n = 1, 2, 3 ..n. | - -### predict_proba { #pyspatialml.Raster.predict_proba } - -`Raster.predict_proba(estimator, file_path=None, in_memory=False, indexes=None, driver='GTiff', dtype=None, nodata=None, constants=None, progress=False, **kwargs)` - -Apply class probability prediction of a scikit learn model to a Raster. - -#### Parameters - -| Name | Type | Description | Default | -|-------------|-------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `estimator` | estimator object implementing 'fit' | The object to use to fit the data. | _required_ | -| `file_path` | str (optional | Path to a GeoTiff raster for the prediction results. If not specified then the output is written to a temporary file. | `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | -| `indexes` | list of integers (optional | List of class indices to export. In some circumstances, only a subset of the class probability estimations are desired, for instance when performing a binary classification only the probabilities for the positive class may be desired. 
| `None)` | -| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | -| `dtype` | str (optional | Optionally specify a GDAL compatible data type when saving to file. If not specified, a data type is set based on the data type of the prediction. | `None)` | -| `nodata` | any number (optional | Nodata value for file export. If not specified then the nodata value is derived from the minimum permissible value for the given data type. | `None)` | -| `progress` | bool (default False) | Show progress bar for prediction. | `False` | -| `constants` | | Constant features to add to the Raster object with each value in a list or 1d ndarray representing an additional feature. If a list-like object of values os passed, then each numeric value will be appended as constant features to the last columns in the data. It is therefore important that all features including constant features are present in the same order as what was used to train the model. If a dict is passed, then the keys of the dict must refer to the names of raster layers in the Raster object. In this case, the values of the dict will replace the values of the raster layers in the Raster object. | `None` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | - -#### Returns - -| Type | Description | -|---------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| pyspatialml.raster.Raster | Raster containing predicted class probabilities. Each predicted class is represented by a RasterLayer object. 
The RasterLayers are named `prob_n` for 1,2,3..n, with `n` based on the index position of the classes, not the number of the class itself. For example, a classification model predicting classes with integer values of 1, 3, and 5 would result in three RasterLayers named 'prob_1', 'prob_2' and 'prob_3'. | - -### read { #pyspatialml.Raster.read } - -`Raster.read(masked=False, window=None, out_shape=None, resampling='nearest', as_df=False, **kwargs)` - -Reads data from the Raster object into a numpy array. - -#### Parameters - -| Name | Type | Description | Default | -|--------------|-----------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| -| `masked` | bool (default False) | Read data into a masked array. | `False` | -| `window` | rasterio.window.Window object (optional | Tuple of col_off, row_off, width, height of a window of data to read a chunk of data into a ndarray. | `None)` | -| `out_shape` | tuple (optional | Shape of shape of array (rows, cols) to read data into using decimated reads. | `None)` | -| `resampling` | str (default 'nearest') | Resampling method to use when applying decimated reads when out_shape is specified. Supported methods are: 'average', 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', 'max', 'med', 'min', 'mode', 'q1', 'q3'. | `'nearest'` | -| `as_df` | bool (default False) | Whether to return the data as a pandas.DataFrame with columns named by the RasterLayer names. | `False` | -| `**kwargs` | dict | Other arguments to pass to rasterio.DatasetReader.read method | `{}` | - -#### Returns - -| Type | Description | -|---------|---------------------------------------------------------------------------------------| -| ndarray | Raster values in 3d ndarray with the dimensions in order of (band, row, and column). 
| - -### rename { #pyspatialml.Raster.rename } - -`Raster.rename(names, in_place=False)` - -Rename a RasterLayer within the Raster object. - -#### Parameters - -| Name | Type | Description | Default | -|------------|----------------------|---------------------------------------------------------------------------------------------------------|------------| -| `names` | dict | dict of old_name : new_name | _required_ | -| `in_place` | bool (default False) | Whether to change names of the Raster object in-place or leave original and return a new Raster object. | `False` | - -#### Returns - -| Type | Description | -|--------------------------------|--------------------------------------| -| pyspatialml.pyspatialml.Raster | Returned only if `in_place` is False | - -### sample { #pyspatialml.Raster.sample } - -`Raster.sample(size, strata=None, return_array=False, random_state=None)` - -Generates a random sample of according to size, and samples -the pixel values. - -#### Parameters - -| Name | Type | Description | Default | -|----------------|---------------------------------|-----------------------------------------------------------------------------------------------------------------------|------------| -| `size` | int | Number of random samples or number of samples per strata if a `strata` object is supplied. | _required_ | -| `strata` | pyspatialml Raster object (opt) | Whether to use stratified instead of random sampling. Strata can be supplied using another pyspatialml.Raster object. | `None` | -| `return_array` | bool(opt) | Optionally return extracted data as separate X and xy masked numpy arrays. | `False` | -| `random_state` | int(opt) | integer to use within random.seed. 
| `None` | - -#### Returns - -| Type | Description | -|-------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| pandas.pandas.DataFrame | DataFrame containing values of names of RasterLayers in the Raster if `return_array` is False. | -| tuple | A tuple containing two elements if `return_array` is True: - numpy.ndarray Numpy array of extracted raster values, typically 2d. - numpy.ndarray 2D numpy array of xy coordinates of extracted values. | - -### scale { #pyspatialml.Raster.scale } - -`Raster.scale(centre=True, scale=True, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, progress=False)` - -Standardize (centre and scale) a Raster object by -subtracting the mean and dividing by the standard deviation for -each layer in the object. - -The mean and standard deviation statistics are calculated -for each layer separately. - -#### Parameters - -| Name | Type | Description | Default | -|-------------|-----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `centre` | bool | Whether to subtract the mean from each layer. | `is True` | -| `scale` | bool | Whether to divide each layer by the standard deviation of the layer. | `is True` | -| `file_path` | str (optional | Path to a GeoTiff raster for the prediction results. If not specified then the output is written to a temporary file. | `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | -| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | -| `dtype` | str (optional | Optionally specify a GDAL compatible data type when saving to file. 
If not specified, a data type is set based on the data type of the prediction. | `None)` | -| `nodata` | any number (optional | Nodata value for file export. If not specified then the nodata value is derived from the minimum permissible value for the given data type. | `None)` | -| `progress` | bool (default False) | Show progress bar for operation. | `False` | - -#### Returns - -| Type | Description | -|-----------------------------------------------|---------------| -| Pyspatialml.Raster object with rescaled data. | | - -### set_block_shape { #pyspatialml.Raster.set_block_shape } - -`Raster.set_block_shape(value)` - -Set the block shape of the raster, i.e. the height and width -of windows to read in chunks for the predict, predict_proba, -apply, and other supported-methods. - -Note block shape can also be set with `myraster.block_shape = (500, 500)` - -#### Parameters - -| Name | Type | Description | Default | -|---------|--------|-------------------------------------------------|------------| -| `value` | tuple | A tuple of (height, width) for the block window | _required_ | - -### tail { #pyspatialml.Raster.tail } - -`Raster.tail()` - -Return the last 10 rows from the Raster as a ndarray - -### to_crs { #pyspatialml.Raster.to_crs } - -`Raster.to_crs(crs, resampling='nearest', file_path=None, in_memory=False, driver='GTiff', nodata=None, n_jobs=1, warp_mem_lim=0, progress=False, **kwargs)` - -Reprojects a Raster object to a different crs. 
- -#### Parameters - -| Name | Type | Description | Default | -|----------------|----------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| -| `crs` | rasterio.transform.CRS object, or dict | Example: CRS({'init': 'EPSG:4326'}) | _required_ | -| `resampling` | str (default 'nearest') | Resampling method to use. One of the following: nearest, bilinear, cubic, cubic_spline, lanczos, average, mode, max (GDAL >= 2.2), min (GDAL >= 2.2), med (GDAL >= 2.2), q1 (GDAL >= 2.2), q3 (GDAL >= 2.2) | `'nearest'` | -| `file_path` | str (optional | Optional path to save reprojected Raster object. If not specified then a tempfile is used. | `None)` | -| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | -| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | -| `nodata` | any number (optional | Nodata value for new dataset. If not specified then the existing nodata value of the Raster object is used, which can accommodate the dtypes of the individual layers in the Raster. | `None)` | -| `n_jobs` | int (default 1) | The number of warp worker threads. | `1` | -| `warp_mem_lim` | int (default 0) | The warp operation memory limit in MB. Larger values allow the warp operation to be carried out in fewer chunks. The amount of memory required to warp a 3-band uint8 2000 row x 2000 col raster to a destination of the same size is approximately 56 MB. The default (0) means 64 MB with GDAL 2.2. | `0` | -| `progress` | bool (default False) | Optionally show progress of transform operations. 
| `False` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | - -#### Returns - -| Type | Description | -|---------------------------|--------------------------------| -| pyspatialml.raster.Raster | Raster following reprojection. | - -### to_pandas { #pyspatialml.Raster.to_pandas } - -`Raster.to_pandas(max_pixels=None, resampling='nearest')` - -Raster to pandas DataFrame. - -#### Parameters - -| Name | Type | Description | Default | -|--------------|-------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| -| `max_pixels` | | Maximum number of pixels to sample. By default all pixels are used. | `None` | -| `resampling` | str (default 'nearest') | Resampling method to use when applying decimated reads when out_shape is specified. Supported methods are: 'average', 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', 'max', 'med', 'min', 'mode', 'q1', 'q3'. | `'nearest'` | - -#### Returns - -| Type | Description | -|-------------------------|----------------------------------------------------------------------------------------------------------| -| pandas.pandas.DataFrame | DataFrame containing values of names of RasterLayers in the Raster as columns, and pixel values as rows. | - -### write { #pyspatialml.Raster.write } - -`Raster.write(file_path, driver='GTiff', dtype=None, nodata=None, **kwargs)` - -Write the Raster object to a file. - -Overrides the write RasterBase class method, which is a partial -function of the rasterio.DatasetReader.write method. 
- -#### Parameters - -| Name | Type | Description | Default | -|-------------|---------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| -| `file_path` | str | File path used to save the Raster object. | _required_ | -| `driver` | str (default is 'GTiff'). | Name of GDAL driver used to save Raster data. | `'GTiff'` | -| `dtype` | str (opt | Optionally specify a numpy compatible data type when saving to file. If not specified, a data type is selected based on the data types of RasterLayers in the Raster object. | `None)` | -| `nodata` | any number (opt | Optionally assign a new nodata value when saving to file. If not specified a nodata value based on the minimum permissible value for the data types of RasterLayers in the Raster object is used. Note that this does not change the pixel nodata values of the raster, it only changes the metadata of what value represents a nodata pixel. | `None)` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | - -#### Returns - -| Type | Description | -|---------------------------|------------------------------------| +# Raster { #pyspatialml.Raster } + +`Raster(self, src, crs=None, transform=None, nodata=None, file_path=None, driver=None, tempdir=tempfile.tempdir, in_memory=False)` + +Creates a collection of file-based GDAL-supported raster +datasets that share a common coordinate reference system and +geometry. 
+ +Raster objects encapsulate RasterLayer objects, which represent +single band raster datasets that can physically be represented by +either separate single-band raster files, multi-band raster files, +or any combination of individual bands from multi-band raster and +single-band raster datasets. + +## Attributes + +| Name | Type | Description | +|-------------|--------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| files | list | A list of the raster dataset files that are used in the Raster. This does not have to be the same length as the number of RasterLayers because some files may have multiple bands. | +| meta | dict | A dict containing the raster metadata. The dict contains the following keys/values: crs : the crs object transform : the Affine.affine transform object width : width of the Raster in pixels height : height of the Raster in pixels count : number of RasterLayers within the Raster dtype : the numpy datatype that represents lowest common denominator of the different dtypes for all of the layers in the Raster. | +| names | list | A list of the RasterLayer names. | +| block_shape | tuple | The default block_shape in (rows, cols) for reading windows of data in the Raster for out-of-memory processing. | + +## Methods + +| Name | Description | +| --- | --- | +| [aggregate](#pyspatialml.Raster.aggregate) | Aggregates a raster to (usually) a coarser grid cell size. | +| [alter](#pyspatialml.Raster.alter) | Apply a fitted scikit-learn transformer to a Raster object. | +| [append](#pyspatialml.Raster.append) | Method to add new RasterLayers to a Raster object. 
| +| [apply](#pyspatialml.Raster.apply) | Apply user-supplied function to a Raster object. | +| [block_shapes](#pyspatialml.Raster.block_shapes) | Generator for windows for optimal reading and writing based | +| [close](#pyspatialml.Raster.close) | Close all of the RasterLayer objects in the Raster. | +| [copy](#pyspatialml.Raster.copy) | Creates a shallow copy of a Raster object | +| [crop](#pyspatialml.Raster.crop) | Crops a Raster object by the supplied bounds. | +| [drop](#pyspatialml.Raster.drop) | Drop individual RasterLayers from a Raster object | +| [extract_raster](#pyspatialml.Raster.extract_raster) | Sample a Raster object by an aligned raster of labelled pixels. | +| [extract_vector](#pyspatialml.Raster.extract_vector) | Sample a Raster/RasterLayer using a geopandas GeoDataframe | +| [extract_xy](#pyspatialml.Raster.extract_xy) | Samples pixel values using an array of xy locations. | +| [head](#pyspatialml.Raster.head) | Return the first 10 rows from the Raster as an ndarray | +| [intersect](#pyspatialml.Raster.intersect) | Perform an intersect operation on the Raster object. | +| [mask](#pyspatialml.Raster.mask) | Mask a Raster object based on the outline of shapes in a | +| [predict](#pyspatialml.Raster.predict) | Apply prediction of a scikit learn model to a Raster. | +| [predict_proba](#pyspatialml.Raster.predict_proba) | Apply class probability prediction of a scikit learn model to a Raster. | +| [read](#pyspatialml.Raster.read) | Reads data from the Raster object into a numpy array. | +| [rename](#pyspatialml.Raster.rename) | Rename a RasterLayer within the Raster object. | +| [sample](#pyspatialml.Raster.sample) | Generates a random sample according to size, and samples | +| [scale](#pyspatialml.Raster.scale) | Standardize (centre and scale) a Raster object by | +| [set_block_shape](#pyspatialml.Raster.set_block_shape) | Set the block shape of the raster, i.e. 
the height and width | +| [tail](#pyspatialml.Raster.tail) | Return the last 10 rows from the Raster as a ndarray | +| [to_crs](#pyspatialml.Raster.to_crs) | Reprojects a Raster object to a different crs. | +| [to_pandas](#pyspatialml.Raster.to_pandas) | Raster to pandas DataFrame. | +| [write](#pyspatialml.Raster.write) | Write the Raster object to a file. | + +### aggregate { #pyspatialml.Raster.aggregate } + +`Raster.aggregate(out_shape, resampling='nearest', file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, **kwargs)` + +Aggregates a raster to (usually) a coarser grid cell size. + +#### Parameters + +| Name | Type | Description | Default | +|--------------|-------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| +| `out_shape` | tuple | New shape in (rows, cols). | _required_ | +| `resampling` | str (default 'nearest') | Resampling method to use when applying decimated reads when out_shape is specified. Supported methods are: 'average', 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', 'max', 'med', 'min', 'mode', 'q1', 'q3'. | `'nearest'` | +| `file_path` | str (optional | File path to save to cropped raster. If not supplied then the aggregated raster is saved to a temporary file. | `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | +| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. 
If not specified then the new intersected Raster is created using the dtype of the existing Raster dataset, which uses a dtype that can accommodate the data types of all of the individual RasterLayers. | `None)` | +| `nodata` | any number (optional | Nodata value for new dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's dtype. Note that this does not change the pixel nodata values of the raster, it only changes the metadata of what value represents a nodata pixel. | `None)` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|---------------------------|-----------------------------------------------| +| pyspatialml.raster.Raster | Raster object aggregated to a new pixel size. | + +### alter { #pyspatialml.Raster.alter } + +`Raster.alter(transformer, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, progress=False)` + +Apply a fitted scikit-learn transformer to a Raster object. + +Can be used to transform a raster using methods such as StandardScaler, +RobustScaler etc. + +#### Parameters + +| Name | Type | Description | Default | +|---------------|--------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `transformer` | a sklearn.preprocessing.Transformer object | | _required_ | +| `file_path` | str (optional | Path to a GeoTiff raster for the prediction results. If not specified then the output is written to a temporary file. | `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. 
| `'GTiff'` | +| `dtype` | str (optional | Optionally specify a GDAL compatible data type when saving to file. If not specified, a data type is set based on the data type of the prediction. | `None)` | +| `nodata` | any number (optional | Nodata value for file export. If not specified then the nodata value is derived from the minimum permissible value for the given data type. | `None)` | +| `progress` | bool (default False) | Show progress bar for operation. | `False` | + +#### Returns + +| Type | Description | +|--------------------------------------------------|---------------| +| Pyspatialml.Raster object with transformed data. | | + +### append { #pyspatialml.Raster.append } + +`Raster.append(other, in_place=False)` + +Method to add new RasterLayers to a Raster object. + +Note that this modifies the Raster object in-place by default. + +#### Parameters + +| Name | Type | Description | Default | +|------------|------------------------------------------|------------------------------------------------------------------------------------------------|------------| +| `other` | Raster object, or list of Raster objects | Object to append to the Raster. | _required_ | +| `in_place` | bool (default False) | Whether to change the Raster object in-place or leave original and return a new Raster object. | `False` | + +#### Returns + +| Type | Description | +|---------------------------|--------------------------------------| +| pyspatialml.raster.Raster | Returned only if `in_place` is False | + +### apply { #pyspatialml.Raster.apply } + +`Raster.apply(function, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, progress=False, function_args={}, **kwargs)` + +Apply user-supplied function to a Raster object. 
+ +#### Parameters + +| Name | Type | Description | Default | +|-----------------|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `function` | function | Function that takes an numpy array as a single argument. | _required_ | +| `file_path` | str (optional | Optional path to save calculated Raster object. If not specified then a tempfile is used. | `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | +| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. If not specified then the new Raster is created using the dtype of the calculation result. | `None)` | +| `nodata` | any number (optional | Nodata value for new dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's data type. Note that this changes the values of the pixels that represent nodata pixels. | `None)` | +| `progress` | bool (default False) | Optionally show progress of transform operations. | `False` | +| `function_args` | dict(optional) | Optionally pass arguments to the `function` as a dict or keyword arguments. | `{}` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|---------------------------|------------------------------------------| +| pyspatialml.raster.Raster | Raster containing the calculated result. 
| + +### block_shapes { #pyspatialml.Raster.block_shapes } + +`Raster.block_shapes(rows, cols)` + +Generator for windows for optimal reading and writing based +on the raster format Windows and returns as a tuple with xoff, +yoff, width, height. + +#### Parameters + +| Name | Type | Description | Default | +|--------|--------|-----------------------------|------------| +| `rows` | int | Height of window in rows. | _required_ | +| `cols` | int | Width of window in columns. | _required_ | + +### close { #pyspatialml.Raster.close } + +`Raster.close()` + +Close all of the RasterLayer objects in the Raster. + +Note that this will cause any rasters based on temporary files +to be removed. This is intended as a method of clearing +temporary files that may have accumulated during an analysis +session. + +### copy { #pyspatialml.Raster.copy } + +`Raster.copy(subset=None)` + +Creates a shallow copy of a Raster object + +Note that shallow in the context of a Raster object means that +an immutable copy of the object is made, however the on-disk and +in-memory file locations remain the same. + +#### Parameters + +| Name | Type | Description | Default | +|----------|--------|------------------------------------------------|-----------| +| `subset` | opt | A list of layer names to subset while copying. | `None` | + +#### Returns + +| Type | Description | +|---------------------------|---------------| +| pyspatialml.raster.Raster | | + +### crop { #pyspatialml.Raster.crop } + +`Raster.crop(bounds, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, **kwargs)` + +Crops a Raster object by the supplied bounds. 
+ +#### Parameters + +| Name | Type | Description | Default | +|-------------|-------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `bounds` | tuple | A tuple containing the bounding box to clip by in the form of (xmin, ymin, xmax, ymax). | _required_ | +| `file_path` | str (optional | File path to save to cropped raster. If not supplied then the cropped raster is saved to a temporary file. | `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `driver` | str (default 'GTiff'). Default is 'GTiff' | Named of GDAL-supported driver for file export. | `'GTiff'` | +| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. If not specified then the new intersected Raster is created using the dtype of theexisting Raster dataset, which uses a dtype that can accommodate the data types of all of the individual RasterLayers. | `None)` | +| `nodata` | any number (optional | Nodata value for new dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's data type. Note that this does not change the pixel nodata values of the raster, it only changes the metadata of what value represents a nodata pixel. | `None)` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|---------------------------|-------------------------------| +| pyspatialml.raster.Raster | Raster cropped to new extent. 
| + +### drop { #pyspatialml.Raster.drop } + +`Raster.drop(labels, in_place=False)` + +Drop individual RasterLayers from a Raster object + +Note that this modifies the Raster object in-place by default. + +#### Parameters + +| Name | Type | Description | Default | +|------------|---------------------------|-------------------------------------------------------------------------------------------------------|------------| +| `labels` | single label or list-like | Index (int) or layer name to drop. Can be a single integer or label, or a list of integers or labels. | _required_ | +| `in_place` | bool (default False) | Whether to change the Raster object in-place or leave original and return a new Raster object. | `False` | + +#### Returns + +| Type | Description | +|--------------------------------|-------------------------------------| +| pyspatialml.pyspatialml.Raster | Returned only if `in_place` is True | + +### extract_raster { #pyspatialml.Raster.extract_raster } + +`Raster.extract_raster(src, progress=False)` + +Sample a Raster object by an aligned raster of labelled pixels. + +#### Parameters + +| Name | Type | Description | Default | +|------------|-----------|-----------------------------------------------------------------------------------------|------------| +| `src` | | Single band raster containing labelled pixels as an open rasterio DatasetReader object. | _required_ | +| `progress` | bool(opt) | Show a progress bar for extraction. | `False` | + +#### Returns + +| Type | Description | +|----------------------------------|----------------------------------------------------------------------------------| +| geopandas.geopandas.GeoDataFrame | Geodataframe containing extracted data as point features if `return_array=False` | + +### extract_vector { #pyspatialml.Raster.extract_vector } + +`Raster.extract_vector(gdf, progress=False)` + +Sample a Raster/RasterLayer using a geopandas GeoDataframe +containing points, lines or polygon features. 
+ +#### Parameters + +| Name | Type | Description | Default | +|------------|-----------|-----------------------------------------------------------------------------------------------------------------------|------------| +| `gdf` | | Containing either point, line or polygon geometries. Overlapping geometries will cause the same pixels to be sampled. | _required_ | +| `progress` | bool(opt) | Show a progress bar for extraction. | `False` | + +#### Returns + +| Type | Description | +|----------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| geopandas.geopandas.GeoDataframe | Containing extracted data as point geometries (one point per pixel). The resulting GeoDataFrame is indexed using a named pandas.MultiIndex, with `pixel_idx` index referring to the index of each pixel that was sampled, and the `geometry_idx` index referring to the index of the each geometry in the supplied `gdf`. This makes it possible to keep track of how sampled pixel relates to the original geometries, i.e. multiple pixels being extracted within the area of a single polygon that can be referred to using the `geometry_idx`. 
The extracted data can subsequently be joined with the attribute table of the supplied `gdf` using: training_py = geopandas.read_file(nc.polygons) df = self.stack.extract_vector(gdf=training_py) df = df.dropna() df = df.merge( right=training_py.loc[:, ("id", "label")], left_on="polygon_idx", right_on="id", right_index=True ) | + +### extract_xy { #pyspatialml.Raster.extract_xy } + +`Raster.extract_xy(xys, return_array=False, progress=False)` + +Samples pixel values using an array of xy locations. + +#### Parameters + +| Name | Type | Description | Default | +|----------------|---------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `xys` | 2d array-like | x and y coordinates from which to sample the raster (n_samples, xys). | _required_ | +| `return_array` | bool(opt) | By default the extracted pixel values are returned as a geopandas.GeoDataFrame. If `return_array=True` then the extracted pixel values are returned as a tuple of numpy.ndarrays. | `False` | +| `progress` | bool(opt) | Show a progress bar for extraction. | `False` | + +#### Returns + +| Type | Description | +|----------------------------------|----------------------------------------------------------------------------------------| +| geopandas.geopandas.GeoDataframe | Containing extracted data as point geometries if `return_array=False`. | +| numpy.numpy.ndarray | 2d masked array containing sampled raster values (sample, bands) at the x,y locations. | + +### head { #pyspatialml.Raster.head } + +`Raster.head()` + +Return the first 10 rows from the Raster as a ndarray + +### intersect { #pyspatialml.Raster.intersect } + +`Raster.intersect(file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, **kwargs)` + +Perform a intersect operation on the Raster object. 
+ +Computes the geometric intersection of the RasterLayers with +the Raster object. This will cause nodata values in any of +the rasters to be propagated through all of the output rasters. + +#### Parameters + +| Name | Type | Description | Default | +|-------------|-----------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `file_path` | str (optional | File path to save to resulting Raster. If not supplied then the resulting Raster is saved to a temporary file. | `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | +| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. If not specified then the new intersected Raster is created using the dtype of the existing Raster dataset, which uses a dtype that can accommodate the data types of all of the individual RasterLayers. | `None)` | +| `nodata` | any number (optional | Nodata value for new dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's data type. Note that this changes the values of the pixels that represent nodata to the new value. | `None)` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|---------------------------|------------------------------------------------------------------------------------------------| +| pyspatialml.raster.Raster | Raster with layers that are masked based on a union of all masks in the suite of RasterLayers. 
| + +### mask { #pyspatialml.Raster.mask } + +`Raster.mask(shapes, invert=False, crop=True, pad=False, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, **kwargs)` + +Mask a Raster object based on the outline of shapes in a +geopandas.GeoDataFrame + +#### Parameters + +| Name | Type | Description | Default | +|-------------|----------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `shapes` | geopandas.geopandas.GeoDataFrame | GeoDataFrame containing masking features. | _required_ | +| `invert` | bool (default False) | If False then pixels outside shapes will be masked. If True then pixels inside shape will be masked. | `False` | +| `crop` | bool (default True) | Crop the raster to the extent of the shapes. | `True` | +| `pad` | bool (default False) | If True, the features will be padded in each direction by one half of a pixel prior to cropping raster. | `False` | +| `file_path` | str (optional | File path to save to resulting Raster. If not supplied then the resulting Raster is saved to a temporary file. | `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | +| `dtype` | str (optional | Coerce RasterLayers to the specified dtype. If not specified then the cropped Raster is created using the existing dtype, which usesa dtype that can accommodate the data types of all of the individual RasterLayers. | `None)` | +| `nodata` | any number (optional | Nodata value for cropped dataset. If not specified then a nodata value is set based on the minimum permissible value of the Raster's data type. 
Note that this changes the values of the pixels to the new nodata value, and changes the metadata of the raster. | `None)` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|--------------------------------|----------------------------| +| pyspatialml.pyspatialml.Raster | Raster with masked layers. | + +### predict { #pyspatialml.Raster.predict } + +`Raster.predict(estimator, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, progress=False, constants=None, **kwargs)` + +Apply prediction of a scikit learn model to a Raster. + +The model can represent any scikit learn model or compatible +api with a `fit` and `predict` method. These can consist of +classification or regression models. Multi-class +classifications and multi-target regressions are also +supported. + +#### Parameters + +| Name | Type | Description | Default | +|-------------|-------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `estimator` | estimator object implementing 'fit' | The object to use to fit the data. | _required_ | +| `file_path` | str (optional | Path to a GeoTiff raster for the prediction results. If not specified then the output is written to a temporary file. 
| `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export | `'GTiff'` | +| `dtype` | str (optional | Optionally specify a GDAL compatible data type when saving to file. If not specified, np.float32 is assumed. | `None)` | +| `nodata` | any number (optional | Nodata value for file export. If not specified then the nodata value is derived from the minimum permissible value for the given data type. | `None)` | +| `progress` | bool (default False) | Show progress bar for prediction. | `False` | +| `constants` | | Constant features to add to the Raster object with each value in a list or 1d ndarray representing an additional feature. If a list-like object of values os passed, then each numeric value will be appended as constant features to the last columns in the data. It is therefore important that all features including constant features are present in the same order as what was used to train the model. If a dict is passed, then the keys of the dict must refer to the names of raster layers in the Raster object. In this case, the values of the dict will replace the values of the raster layers in the Raster object. | `None` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|---------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| pyspatialml.raster.Raster | Raster object containing prediction results as a RasterLayers. 
For classification and regression models, the Raster will contain a single RasterLayer, unless the model is multi-class or multi-target. Layers are named automatically as `pred_raw_n` with n = 1, 2, 3 ..n. | + +### predict_proba { #pyspatialml.Raster.predict_proba } + +`Raster.predict_proba(estimator, file_path=None, in_memory=False, indexes=None, driver='GTiff', dtype=None, nodata=None, constants=None, progress=False, **kwargs)` + +Apply class probability prediction of a scikit learn model to a Raster. + +#### Parameters + +| Name | Type | Description | Default | +|-------------|-------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `estimator` | estimator object implementing 'fit' | The object to use to fit the data. | _required_ | +| `file_path` | str (optional | Path to a GeoTiff raster for the prediction results. If not specified then the output is written to a temporary file. | `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `indexes` | list of integers (optional | List of class indices to export. In some circumstances, only a subset of the class probability estimations are desired, for instance when performing a binary classification only the probabilities for the positive class may be desired. 
| `None)` | +| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | +| `dtype` | str (optional | Optionally specify a GDAL compatible data type when saving to file. If not specified, a data type is set based on the data type of the prediction. | `None)` | +| `nodata` | any number (optional | Nodata value for file export. If not specified then the nodata value is derived from the minimum permissible value for the given data type. | `None)` | +| `progress` | bool (default False) | Show progress bar for prediction. | `False` | +| `constants` | | Constant features to add to the Raster object with each value in a list or 1d ndarray representing an additional feature. If a list-like object of values os passed, then each numeric value will be appended as constant features to the last columns in the data. It is therefore important that all features including constant features are present in the same order as what was used to train the model. If a dict is passed, then the keys of the dict must refer to the names of raster layers in the Raster object. In this case, the values of the dict will replace the values of the raster layers in the Raster object. | `None` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|---------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| pyspatialml.raster.Raster | Raster containing predicted class probabilities. Each predicted class is represented by a RasterLayer object. 
The RasterLayers are named `prob_n` for 1,2,3..n, with `n` based on the index position of the classes, not the number of the class itself. For example, a classification model predicting classes with integer values of 1, 3, and 5 would result in three RasterLayers named 'prob_1', 'prob_2' and 'prob_3'. | + +### read { #pyspatialml.Raster.read } + +`Raster.read(masked=False, window=None, out_shape=None, resampling='nearest', as_df=False, **kwargs)` + +Reads data from the Raster object into a numpy array. + +#### Parameters + +| Name | Type | Description | Default | |--------------|-----------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| +| `masked` | bool (default False) | Read data into a masked array. | `False` | +| `window` | rasterio.window.Window object (optional | Tuple of col_off, row_off, width, height of a window of data to read a chunk of data into an ndarray. | `None)` | +| `out_shape` | tuple (optional | Shape of the array (rows, cols) to read data into using decimated reads. | `None)` | +| `resampling` | str (default 'nearest') | Resampling method to use when applying decimated reads when out_shape is specified. Supported methods are: 'average', 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', 'max', 'med', 'min', 'mode', 'q1', 'q3'. | `'nearest'` | +| `as_df` | bool (default False) | Whether to return the data as a pandas.DataFrame with columns named by the RasterLayer names. | `False` | +| `**kwargs` | dict | Other arguments to pass to rasterio.DatasetReader.read method | `{}` | + +#### Returns + +| Type | Description | |---------|---------------------------------------------------------------------------------------| +| ndarray | Raster values in 3d ndarray with the dimensions in order of (band, row, and column). 
| + +### rename { #pyspatialml.Raster.rename } + +`Raster.rename(names, in_place=False)` + +Rename a RasterLayer within the Raster object. + +#### Parameters + +| Name | Type | Description | Default | |------------|----------------------|---------------------------------------------------------------------------------------------------------|------------| +| `names` | dict | dict of old_name : new_name | _required_ | +| `in_place` | bool (default False) | Whether to change names of the Raster object in-place or leave original and return a new Raster object. | `False` | + +#### Returns + +| Type | Description | |--------------------------------|--------------------------------------| +| pyspatialml.pyspatialml.Raster | Returned only if `in_place` is False | + +### sample { #pyspatialml.Raster.sample } + +`Raster.sample(size, strata=None, return_array=False, random_state=None)` + +Generates a random sample according to size, and samples +the pixel values. + +#### Parameters + +| Name | Type | Description | Default | |----------------|---------------------------------|-----------------------------------------------------------------------------------------------------------------------|------------| +| `size` | int | Number of random samples or number of samples per strata if a `strata` object is supplied. | _required_ | +| `strata` | pyspatialml Raster object (opt) | Whether to use stratified instead of random sampling. Strata can be supplied using another pyspatialml.Raster object. | `None` | +| `return_array` | bool(opt) | Optionally return extracted data as separate X and xy masked numpy arrays. | `False` | +| `random_state` | int(opt) | integer to use within random.seed. 
| `None` | + +#### Returns + +| Type | Description | +|-------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| pandas.pandas.DataFrame | DataFrame containing values of names of RasterLayers in the Raster if `return_array` is False. | +| tuple | A tuple containing two elements if `return_array` is True: - numpy.ndarray Numpy array of extracted raster values, typically 2d. - numpy.ndarray 2D numpy array of xy coordinates of extracted values. | + +### scale { #pyspatialml.Raster.scale } + +`Raster.scale(centre=True, scale=True, file_path=None, in_memory=False, driver='GTiff', dtype=None, nodata=None, progress=False)` + +Standardize (centre and scale) a Raster object by +subtracting the mean and dividing by the standard deviation for +each layer in the object. + +The mean and standard deviation statistics are calculated +for each layer separately. + +#### Parameters + +| Name | Type | Description | Default | +|-------------|-----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `centre` | bool | Whether to subtract the mean from each layer. | `is True` | +| `scale` | bool | Whether to divide each layer by the standard deviation of the layer. | `is True` | +| `file_path` | str (optional | Path to a GeoTiff raster for the prediction results. If not specified then the output is written to a temporary file. | `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | +| `dtype` | str (optional | Optionally specify a GDAL compatible data type when saving to file. 
If not specified, a data type is set based on the data type of the prediction. | `None)` | +| `nodata` | any number (optional | Nodata value for file export. If not specified then the nodata value is derived from the minimum permissible value for the given data type. | `None)` | +| `progress` | bool (default False) | Show progress bar for operation. | `False` | + +#### Returns + +| Type | Description | |-----------------------------------------------|---------------| +| Pyspatialml.Raster object with rescaled data. | | + +### set_block_shape { #pyspatialml.Raster.set_block_shape } + +`Raster.set_block_shape(value)` + +Set the block shape of the raster, i.e. the height and width +of windows to read in chunks for the predict, predict_proba, +apply, and other supported methods. + +Note block shape can also be set with `myraster.block_shape = (500, 500)` + +#### Parameters + +| Name | Type | Description | Default | |---------|--------|-------------------------------------------------|------------| +| `value` | tuple | A tuple of (height, width) for the block window | _required_ | + +### tail { #pyspatialml.Raster.tail } + +`Raster.tail()` + +Return the last 10 rows from the Raster as an ndarray + +### to_crs { #pyspatialml.Raster.to_crs } + +`Raster.to_crs(crs, resampling='nearest', file_path=None, in_memory=False, driver='GTiff', nodata=None, n_jobs=1, warp_mem_lim=0, progress=False, **kwargs)` + +Reprojects a Raster object to a different crs. 
+ +#### Parameters + +| Name | Type | Description | Default | +|----------------|----------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| +| `crs` | rasterio.transform.CRS object, or dict | Example: CRS({'init': 'EPSG:4326'}) | _required_ | +| `resampling` | str (default 'nearest') | Resampling method to use. One of the following: nearest, bilinear, cubic, cubic_spline, lanczos, average, mode, max (GDAL >= 2.2), min (GDAL >= 2.2), med (GDAL >= 2.2), q1 (GDAL >= 2.2), q3 (GDAL >= 2.2) | `'nearest'` | +| `file_path` | str (optional | Optional path to save reprojected Raster object. If not specified then a tempfile is used. | `None)` | +| `in_memory` | bool | Whether to initiated the Raster from an array and store the data in-memory using Rasterio's in-memory files. | `is False` | +| `driver` | str (default 'GTiff') | Named of GDAL-supported driver for file export. | `'GTiff'` | +| `nodata` | any number (optional | Nodata value for new dataset. If not specified then the existing nodata value of the Raster object is used, which can accommodate the dtypes of the individual layers in the Raster. | `None)` | +| `n_jobs` | int (default 1) | The number of warp worker threads. | `1` | +| `warp_mem_lim` | int (default 0) | The warp operation memory limit in MB. Larger values allow the warp operation to be carried out in fewer chunks. The amount of memory required to warp a 3-band uint8 2000 row x 2000 col raster to a destination of the same size is approximately 56 MB. The default (0) means 64 MB with GDAL 2.2. | `0` | +| `progress` | bool (default False) | Optionally show progress of transform operations. 
| `False` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|---------------------------|--------------------------------| +| pyspatialml.raster.Raster | Raster following reprojection. | + +### to_pandas { #pyspatialml.Raster.to_pandas } + +`Raster.to_pandas(max_pixels=None, resampling='nearest')` + +Raster to pandas DataFrame. + +#### Parameters + +| Name | Type | Description | Default | +|--------------|-------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| +| `max_pixels` | | Maximum number of pixels to sample. By default all pixels are used. | `None` | +| `resampling` | str (default 'nearest') | Resampling method to use when applying decimated reads when out_shape is specified. Supported methods are: 'average', 'bilinear', 'cubic', 'cubic_spline', 'gauss', 'lanczos', 'max', 'med', 'min', 'mode', 'q1', 'q3'. | `'nearest'` | + +#### Returns + +| Type | Description | +|-------------------------|----------------------------------------------------------------------------------------------------------| +| pandas.pandas.DataFrame | DataFrame containing values of names of RasterLayers in the Raster as columns, and pixel values as rows. | + +### write { #pyspatialml.Raster.write } + +`Raster.write(file_path, driver='GTiff', dtype=None, nodata=None, **kwargs)` + +Write the Raster object to a file. + +Overrides the write RasterBase class method, which is a partial +function of the rasterio.DatasetReader.write method. 
+ +#### Parameters + +| Name | Type | Description | Default | +|-------------|---------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------| +| `file_path` | str | File path used to save the Raster object. | _required_ | +| `driver` | str (default is 'GTiff'). | Name of GDAL driver used to save Raster data. | `'GTiff'` | +| `dtype` | str (opt | Optionally specify a numpy compatible data type when saving to file. If not specified, a data type is selected based on the data types of RasterLayers in the Raster object. | `None)` | +| `nodata` | any number (opt | Optionally assign a new nodata value when saving to file. If not specified a nodata value based on the minimum permissible value for the data types of RasterLayers in the Raster object is used. Note that this does not change the pixel nodata values of the raster, it only changes the metadata of what value represents a nodata pixel. | `None)` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|---------------------------|------------------------------------| | pyspatialml.raster.Raster | New Raster object from saved file. | \ No newline at end of file diff --git a/reference/RasterLayer.qmd b/reference/RasterLayer.qmd index f1a6684..fbfd154 100644 --- a/reference/RasterLayer.qmd +++ b/reference/RasterLayer.qmd @@ -1,205 +1,205 @@ -# RasterLayer { #pyspatialml.RasterLayer } - -`RasterLayer(self, band)` - -Represents a single raster band derived from a single or -multi-band raster dataset - -Simple wrapper around a rasterio.Band object with additional -methods. 
Used because the Rasterio.Band.ds.read method reads -all bands from a multi-band dataset, whereas the RasterLayer read -method only reads a single band. - -Methods encapsulated in RasterLayer objects represent those that -typically would only be applied to a single-band of a raster, i.e. -sieve-clump, distance to non-NaN pixels, or arithmetic operations -on individual layers. - -## Attributes - -| Name | Type | Description | -|---------------|---------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------| -| bidx | int | The band index of the RasterLayer within the file dataset. | -| dtype | str | The data type of the RasterLayer. | -| ds | rasterio.rasterio.band | The underlying rasterio.band object. | -| name | str | A syntactically valid name for the RasterLayer. | -| file | str | The file path to the dataset. | -| nodata | any number | The number that is used to represent nodata pixels in the RasterLayer. | -| driver | str | The name of the GDAL format driver. | -| meta | dict | A python dict storing the RasterLayer metadata. | -| transform | affine.Affine object | The affine transform parameters. | -| count | int | Number of layers; always equal to 1. | -| shape | tuple | Shape of RasterLayer in (rows, columns) | -| width, height | int | The width (cols) and height (rows) of the dataset. | -| bounds | BoundingBox named tuple | A named tuple with left, bottom, right and top coordinates of the dataset. | -| cmap | str | The name of matplotlib map, or a custom matplotlib.cm.LinearSegmentedColormap or ListedColormap object. | -| norm | matplotlib.matplotlib.colors.matplotlib.colors.Normalize(opt) | A matplotlib.colors.Normalize to apply to the RasterLayer. This overides the norm attribute of the RasterLayer. | - -## Methods - -| Name | Description | -| --- | --- | -| [max](#pyspatialml.RasterLayer.max) | Maximum value. 
| -| [mean](#pyspatialml.RasterLayer.mean) | Mean value | -| [median](#pyspatialml.RasterLayer.median) | Median value | -| [min](#pyspatialml.RasterLayer.min) | Minimum value. | -| [plot](#pyspatialml.RasterLayer.plot) | Plot a RasterLayer using matplotlib.pyplot.imshow | -| [read](#pyspatialml.RasterLayer.read) | Read method for a single RasterLayer. | -| [stddev](#pyspatialml.RasterLayer.stddev) | Standard deviation | -| [write](#pyspatialml.RasterLayer.write) | Write method for a single RasterLayer. | - -### max { #pyspatialml.RasterLayer.max } - -`RasterLayer.max(max_pixels=10000)` - -Maximum value. - -#### Parameters - -| Name | Type | Description | Default | -|--------------|--------|-------------------------------------------------------|-----------| -| `max_pixels` | int | Number of pixels used to inform statistical estimate. | `10000` | - -#### Returns - -| Type | Description | -|---------------------|-------------------------------------------| -| numpy.numpy.float32 | The maximum value of the object's pixels. | - -### mean { #pyspatialml.RasterLayer.mean } - -`RasterLayer.mean(max_pixels=10000)` - -Mean value - -#### Parameters - -| Name | Type | Description | Default | -|--------------|--------|-------------------------------------------------------|-----------| -| `max_pixels` | int | Number of pixels used to inform statistical estimate. | `10000` | - -#### Returns - -| Type | Description | -|---------------------|----------------------------------------| -| numpy.numpy.float32 | The mean value of the object's pixels. | - -### median { #pyspatialml.RasterLayer.median } - -`RasterLayer.median(max_pixels=10000)` - -Median value - -#### Parameters - -| Name | Type | Description | Default | -|--------------|--------|-------------------------------------------------------|-----------| -| `max_pixels` | int | Number of pixels used to inform statistical estimate. 
| `10000` | - -#### Returns - -| Type | Description | -|---------------------|------------------------------------------| -| numpy.numpy.float32 | The medium value of the object's pixels. | - -### min { #pyspatialml.RasterLayer.min } - -`RasterLayer.min(max_pixels=10000)` - -Minimum value. - -#### Parameters - -| Name | Type | Description | Default | -|--------------|--------|-------------------------------------------------------|-----------| -| `max_pixels` | int | Number of pixels used to inform statistical estimate. | `10000` | - -#### Returns - -| Type | Description | -|---------------------|---------------------------------| -| numpy.numpy.float32 | The minimum value of the object | - -### plot { #pyspatialml.RasterLayer.plot } - -`RasterLayer.plot(cmap=None, norm=None, ax=None, cax=None, figsize=None, out_shape=(500, 500), categorical=None, legend=False, vmin=None, vmax=None, fig_kwds=None, legend_kwds=None)` - -Plot a RasterLayer using matplotlib.pyplot.imshow - -#### Parameters - -| Name | Type | Description | Default | -|---------------|---------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------| -| `cmap` | str (default None) | The name of a colormap recognized by matplotlib. Overrides the cmap attribute of the RasterLayer. | `None` | -| `norm` | matplotlib.matplotlib.colors.matplotlib.colors.Normalize(opt) | A matplotlib.colors.Normalize to apply to the RasterLayer. This overrides the norm attribute of the RasterLayer. | `None` | -| `ax` | matplotlib.pyplot.Artist (optional | axes instance on which to draw to plot. | `None)` | -| `cax` | matplotlib.pyplot.Artist (optional | axes on which to draw the legend. 
| `None)` | -| `figsize` | tuple of integers (optional | Size of the matplotlib.figure.Figure. If the ax argument is given explicitly, figsize is ignored. | `None)` | -| `out_shape` | tuple | Number of rows, cols to read from the raster datasets for plotting. | `(500, 500)` | -| `categorical` | bool (optional | if True then the raster values will be considered to represent discrete values, otherwise they are considered to represent continuous values. This overrides the RasterLayer 'categorical' attribute. Setting the argument categorical to True is ignored if the RasterLayer.categorical is already True. | `False)` | -| `legend` | bool (optional | Whether to plot the legend. | `False)` | -| `vmin` | scale (optional | vmin and vmax define the data range that the colormap covers. By default, the colormap covers the complete value range of the supplied data. vmin, vmax are ignored if the norm parameter is used. | `None)` | -| `xmax` | scale (optional | vmin and vmax define the data range that the colormap covers. By default, the colormap covers the complete value range of the supplied data. vmin, vmax are ignored if the norm parameter is used. | `None)` | -| `fig_kwds` | dict (optional | Additional arguments to pass to the matplotlib.pyplot.figure call when creating the figure object. Ignored if ax is passed to the plot function. | `None)` | -| `legend_kwds` | dict (optional | Keyword arguments to pass to matplotlib.pyplot.colorbar(). | `None)` | - -#### Returns - -| Type | Description | -|--------------------------|---------------| -| matplotlib axes instance | | - -### read { #pyspatialml.RasterLayer.read } - -`RasterLayer.read(**kwargs)` - -Read method for a single RasterLayer. - -Reads the pixel values from a RasterLayer into a ndarray that -always will have two dimensions in the order of (rows, columns). 
- -#### Parameters - -| Name | Type | Description | Default | -|------------|-----------------------------------------------|-------------------------------------|-----------| -| `**kwargs` | named arguments that can be passed to the the | rasterio.DatasetReader.read method. | `{}` | - -### stddev { #pyspatialml.RasterLayer.stddev } - -`RasterLayer.stddev(max_pixels=10000)` - -Standard deviation - -#### Parameters - -| Name | Type | Description | Default | -|--------------|--------|-------------------------------------------------------|-----------| -| `max_pixels` | int | Number of pixels used to inform statistical estimate. | `10000` | - -#### Returns - -| Type | Description | -|---------------------|------------------------------------------------| -| numpy.numpy.float32 | The standard deviation of the object's pixels. | - -### write { #pyspatialml.RasterLayer.write } - -`RasterLayer.write(file_path, driver='GTiff', dtype=None, nodata=None, **kwargs)` - -Write method for a single RasterLayer. - -#### Parameters - -| Name | Type | Description | Default | -|-------------|------------------|----------------------------------------------------------------------------------------------------------------------------|------------| -| `file_path` | str(opt) | File path to save the dataset. | _required_ | -| `driver` | str | GDAL-compatible driver used for the file format. | `'GTiff'` | -| `dtype` | str(opt) | Numpy dtype used for the file. If omitted then the RasterLayer's dtype is used. | `None` | -| `nodata` | any number (opt) | A value used to represent the nodata pixels. If omitted then the RasterLayer's nodata value is used (if assigned already). | `None` | -| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. 
| `{}` | - -#### Returns - -| Type | Description | -|-------------------------------------|---------------| +# RasterLayer { #pyspatialml.RasterLayer } + +`RasterLayer(self, band)` + +Represents a single raster band derived from a single or +multi-band raster dataset + +Simple wrapper around a rasterio.Band object with additional +methods. Used because the Rasterio.Band.ds.read method reads +all bands from a multi-band dataset, whereas the RasterLayer read +method only reads a single band. + +Methods encapsulated in RasterLayer objects represent those that +typically would only be applied to a single-band of a raster, i.e. +sieve-clump, distance to non-NaN pixels, or arithmetic operations +on individual layers. + +## Attributes + +| Name | Type | Description | +|---------------|---------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------| +| bidx | int | The band index of the RasterLayer within the file dataset. | +| dtype | str | The data type of the RasterLayer. | +| ds | rasterio.rasterio.band | The underlying rasterio.band object. | +| name | str | A syntactically valid name for the RasterLayer. | +| file | str | The file path to the dataset. | +| nodata | any number | The number that is used to represent nodata pixels in the RasterLayer. | +| driver | str | The name of the GDAL format driver. | +| meta | dict | A python dict storing the RasterLayer metadata. | +| transform | affine.Affine object | The affine transform parameters. | +| count | int | Number of layers; always equal to 1. | +| shape | tuple | Shape of RasterLayer in (rows, columns) | +| width, height | int | The width (cols) and height (rows) of the dataset. | +| bounds | BoundingBox named tuple | A named tuple with left, bottom, right and top coordinates of the dataset. 
| +| cmap | str | The name of matplotlib map, or a custom matplotlib.cm.LinearSegmentedColormap or ListedColormap object. | +| norm | matplotlib.matplotlib.colors.matplotlib.colors.Normalize(opt) | A matplotlib.colors.Normalize to apply to the RasterLayer. This overides the norm attribute of the RasterLayer. | + +## Methods + +| Name | Description | +| --- | --- | +| [max](#pyspatialml.RasterLayer.max) | Maximum value. | +| [mean](#pyspatialml.RasterLayer.mean) | Mean value | +| [median](#pyspatialml.RasterLayer.median) | Median value | +| [min](#pyspatialml.RasterLayer.min) | Minimum value. | +| [plot](#pyspatialml.RasterLayer.plot) | Plot a RasterLayer using matplotlib.pyplot.imshow | +| [read](#pyspatialml.RasterLayer.read) | Read method for a single RasterLayer. | +| [stddev](#pyspatialml.RasterLayer.stddev) | Standard deviation | +| [write](#pyspatialml.RasterLayer.write) | Write method for a single RasterLayer. | + +### max { #pyspatialml.RasterLayer.max } + +`RasterLayer.max(max_pixels=10000)` + +Maximum value. + +#### Parameters + +| Name | Type | Description | Default | +|--------------|--------|-------------------------------------------------------|-----------| +| `max_pixels` | int | Number of pixels used to inform statistical estimate. | `10000` | + +#### Returns + +| Type | Description | +|---------------------|-------------------------------------------| +| numpy.numpy.float32 | The maximum value of the object's pixels. | + +### mean { #pyspatialml.RasterLayer.mean } + +`RasterLayer.mean(max_pixels=10000)` + +Mean value + +#### Parameters + +| Name | Type | Description | Default | +|--------------|--------|-------------------------------------------------------|-----------| +| `max_pixels` | int | Number of pixels used to inform statistical estimate. | `10000` | + +#### Returns + +| Type | Description | +|---------------------|----------------------------------------| +| numpy.numpy.float32 | The mean value of the object's pixels. 
| + +### median { #pyspatialml.RasterLayer.median } + +`RasterLayer.median(max_pixels=10000)` + +Median value + +#### Parameters + +| Name | Type | Description | Default | +|--------------|--------|-------------------------------------------------------|-----------| +| `max_pixels` | int | Number of pixels used to inform statistical estimate. | `10000` | + +#### Returns + +| Type | Description | +|---------------------|------------------------------------------| +| numpy.numpy.float32 | The medium value of the object's pixels. | + +### min { #pyspatialml.RasterLayer.min } + +`RasterLayer.min(max_pixels=10000)` + +Minimum value. + +#### Parameters + +| Name | Type | Description | Default | +|--------------|--------|-------------------------------------------------------|-----------| +| `max_pixels` | int | Number of pixels used to inform statistical estimate. | `10000` | + +#### Returns + +| Type | Description | +|---------------------|---------------------------------| +| numpy.numpy.float32 | The minimum value of the object | + +### plot { #pyspatialml.RasterLayer.plot } + +`RasterLayer.plot(cmap=None, norm=None, ax=None, cax=None, figsize=None, out_shape=(500, 500), categorical=None, legend=False, vmin=None, vmax=None, fig_kwds=None, legend_kwds=None)` + +Plot a RasterLayer using matplotlib.pyplot.imshow + +#### Parameters + +| Name | Type | Description | Default | +|---------------|---------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------| +| `cmap` | str (default None) | The name of a colormap recognized by matplotlib. Overrides the cmap attribute of the RasterLayer. 
| `None` | +| `norm` | matplotlib.matplotlib.colors.matplotlib.colors.Normalize(opt) | A matplotlib.colors.Normalize to apply to the RasterLayer. This overrides the norm attribute of the RasterLayer. | `None` | +| `ax` | matplotlib.pyplot.Artist (optional | axes instance on which to draw to plot. | `None)` | +| `cax` | matplotlib.pyplot.Artist (optional | axes on which to draw the legend. | `None)` | +| `figsize` | tuple of integers (optional | Size of the matplotlib.figure.Figure. If the ax argument is given explicitly, figsize is ignored. | `None)` | +| `out_shape` | tuple | Number of rows, cols to read from the raster datasets for plotting. | `(500, 500)` | +| `categorical` | bool (optional | if True then the raster values will be considered to represent discrete values, otherwise they are considered to represent continuous values. This overrides the RasterLayer 'categorical' attribute. Setting the argument categorical to True is ignored if the RasterLayer.categorical is already True. | `False)` | +| `legend` | bool (optional | Whether to plot the legend. | `False)` | +| `vmin` | scale (optional | vmin and vmax define the data range that the colormap covers. By default, the colormap covers the complete value range of the supplied data. vmin, vmax are ignored if the norm parameter is used. | `None)` | +| `xmax` | scale (optional | vmin and vmax define the data range that the colormap covers. By default, the colormap covers the complete value range of the supplied data. vmin, vmax are ignored if the norm parameter is used. | `None)` | +| `fig_kwds` | dict (optional | Additional arguments to pass to the matplotlib.pyplot.figure call when creating the figure object. Ignored if ax is passed to the plot function. | `None)` | +| `legend_kwds` | dict (optional | Keyword arguments to pass to matplotlib.pyplot.colorbar(). 
| `None)` | + +#### Returns + +| Type | Description | +|--------------------------|---------------| +| matplotlib axes instance | | + +### read { #pyspatialml.RasterLayer.read } + +`RasterLayer.read(**kwargs)` + +Read method for a single RasterLayer. + +Reads the pixel values from a RasterLayer into a ndarray that +always will have two dimensions in the order of (rows, columns). + +#### Parameters + +| Name | Type | Description | Default | +|------------|-----------------------------------------------|-------------------------------------|-----------| +| `**kwargs` | named arguments that can be passed to the the | rasterio.DatasetReader.read method. | `{}` | + +### stddev { #pyspatialml.RasterLayer.stddev } + +`RasterLayer.stddev(max_pixels=10000)` + +Standard deviation + +#### Parameters + +| Name | Type | Description | Default | +|--------------|--------|-------------------------------------------------------|-----------| +| `max_pixels` | int | Number of pixels used to inform statistical estimate. | `10000` | + +#### Returns + +| Type | Description | +|---------------------|------------------------------------------------| +| numpy.numpy.float32 | The standard deviation of the object's pixels. | + +### write { #pyspatialml.RasterLayer.write } + +`RasterLayer.write(file_path, driver='GTiff', dtype=None, nodata=None, **kwargs)` + +Write method for a single RasterLayer. + +#### Parameters + +| Name | Type | Description | Default | +|-------------|------------------|----------------------------------------------------------------------------------------------------------------------------|------------| +| `file_path` | str(opt) | File path to save the dataset. | _required_ | +| `driver` | str | GDAL-compatible driver used for the file format. | `'GTiff'` | +| `dtype` | str(opt) | Numpy dtype used for the file. If omitted then the RasterLayer's dtype is used. | `None` | +| `nodata` | any number (opt) | A value used to represent the nodata pixels. 
If omitted then the RasterLayer's nodata value is used (if assigned already). | `None` | +| `kwargs` | opt | Optional named arguments to pass to the format drivers. For example can be `compress="deflate"` to add compression. | `{}` | + +#### Returns + +| Type | Description | +|-------------------------------------|---------------| | pyspatialml.pyspatialml.RasterLayer | | \ No newline at end of file diff --git a/reference/index.qmd b/reference/index.qmd index 67141f7..c7ef519 100644 --- a/reference/index.qmd +++ b/reference/index.qmd @@ -1,26 +1,26 @@ -# Function reference {.doc .doc-index} - -## Raster datasets - -Raster is a class for reading and writing raster datasets - -| | | -| --- | --- | -| [Raster](Raster.qmd#pyspatialml.Raster) | Creates a collection of file-based GDAL-supported raster | -| [RasterLayer](RasterLayer.qmd#pyspatialml.RasterLayer) | Represents a single raster band derived from a single or | - -## Vector tools - -Tools for working with vector datasets - -| | | -| --- | --- | -| [vector](vector.qmd#pyspatialml.vector) | | - -## Preprocessing - -Preprocessing tools for raster datasets - -| | | -| --- | --- | +# Function reference {.doc .doc-index} + +## Raster datasets + +Raster is a class for reading and writing raster datasets + +| | | +| --- | --- | +| [Raster](Raster.qmd#pyspatialml.Raster) | Creates a collection of file-based GDAL-supported raster | +| [RasterLayer](RasterLayer.qmd#pyspatialml.RasterLayer) | Represents a single raster band derived from a single or | + +## Vector tools + +Tools for working with vector datasets + +| | | +| --- | --- | +| [vector](vector.qmd#pyspatialml.vector) | | + +## Preprocessing + +Preprocessing tools for raster datasets + +| | | +| --- | --- | | [preprocessing](preprocessing.qmd#pyspatialml.preprocessing) | | \ No newline at end of file diff --git a/reference/preprocessing.qmd b/reference/preprocessing.qmd index 124bf85..f429376 100644 --- a/reference/preprocessing.qmd +++ b/reference/preprocessing.qmd 
@@ -1,119 +1,119 @@ -# preprocessing { #pyspatialml.preprocessing } - -`preprocessing` - - - -## Functions - -| Name | Description | -| --- | --- | -| [distance_to_corners](#pyspatialml.preprocessing.distance_to_corners) | Generate buffer distances to corner and centre coordinates of raster | -| [distance_to_samples](#pyspatialml.preprocessing.distance_to_samples) | Generate buffer distances to x,y coordinates. | -| [one_hot_encode](#pyspatialml.preprocessing.one_hot_encode) | One-hot encoding of a RasterLayer. | -| [rotated_coordinates](#pyspatialml.preprocessing.rotated_coordinates) | Generate 2d arrays with n_angles rotated coordinates. | -| [xy_coordinates](#pyspatialml.preprocessing.xy_coordinates) | Fill 2d arrays with their x,y indices. | - -### distance_to_corners { #pyspatialml.preprocessing.distance_to_corners } - -`preprocessing.distance_to_corners(layer, file_path, driver='GTiff')` - -Generate buffer distances to corner and centre coordinates of raster -extent. - -#### Parameters - -| Name | Type | Description | Default | -|-------------|----------------------------------------------------|--------------------------------------------------|------------| -| `layer` | pyspatialml.RasterLayer, or rasterio.DatasetReader | | _required_ | -| `file_path` | str | File path to save to the resulting Raster object | _required_ | -| `driver` | (str, optional.optional.Default is GTiff) | GDAL driver to use to save raster. | `'GTiff'` | - -#### Returns - -| Type | Description | -|---------------------------|---------------| -| pyspatialml.Raster object | | - -### distance_to_samples { #pyspatialml.preprocessing.distance_to_samples } - -`preprocessing.distance_to_samples(layer, file_path, rows, cols, driver='GTiff')` - -Generate buffer distances to x,y coordinates. 
- -#### Parameters - -| Name | Type | Description | Default | -|-------------|----------------------------------------------------|---------------------------------------------------|------------| -| `layer` | pyspatialml.RasterLayer, or rasterio.DatasetReader | RasterLayer to use as a template. | _required_ | -| `file_path` | str | File path to save to the resulting Raster object. | _required_ | -| `rows` | 1d numpy array | array of row indexes. | _required_ | -| `cols` | 1d numpy array | array of column indexes. | _required_ | -| `driver` | str | GDAL driver to use to save raster. | `'GTiff'` | - -#### Returns - -| Type | Description | -|---------------------------|---------------| -| pyspatialml.Raster object | | - -### one_hot_encode { #pyspatialml.preprocessing.one_hot_encode } - -`preprocessing.one_hot_encode(layer, file_path, categories=None, driver='GTiff')` - -One-hot encoding of a RasterLayer. - -#### Parameters - -| Name | Type | Description | Default | -|--------------|-----------------------------------------|-------------------------------------------------------------------------------------------------------------------------|------------| -| `layer` | pyspatialml.pyspatialml.RasterLayer | Containing categories to perform one-hot encoding on. | _required_ | -| `file_path` | str | File path to save one-hot encoded raster. | _required_ | -| `categories` | (list, ndarray) | Optional list of categories to extract. Default performs one-hot encoding on all categorical values in the input layer. | `None` | -| `driver` | (str, options.options.Default is GTiff) | GDAL-compatible driver. | `'GTiff'` | - -#### Returns - -| Type | Description | -|--------------------------------|--------------------------------------------------------------------| -| pyspatialml.pyspatialml.Raster | Each categorical value is encoded as a layer with a Raster object. 
| - -### rotated_coordinates { #pyspatialml.preprocessing.rotated_coordinates } - -`preprocessing.rotated_coordinates(layer, file_path, n_angles=8, driver='GTiff')` - -Generate 2d arrays with n_angles rotated coordinates. - -#### Parameters - -| Name | Type | Description | Default | -|------------|----------------------------------------------------|--------------------------------------------------|------------| -| `layer` | pyspatialml.RasterLayer, or rasterio.DatasetReader | RasterLayer to use as a template. | _required_ | -| `n_angles` | (int, optional.optional.Default is 8) | Number of angles to rotate coordinate system by. | `8` | -| `driver` | (str, optional.optional.Default is GTiff) | GDAL driver to use to save raster. | `'GTiff'` | - -#### Returns - -| Type | Description | -|--------------------------------|---------------| -| pyspatialml.pyspatialml.Raster | | - -### xy_coordinates { #pyspatialml.preprocessing.xy_coordinates } - -`preprocessing.xy_coordinates(layer, file_path, driver='GTiff')` - -Fill 2d arrays with their x,y indices. - -#### Parameters - -| Name | Type | Description | Default | -|-------------|----------------------------------------------------|----------------------------------------------------|------------| -| `layer` | pyspatialml.RasterLayer, or rasterio.DatasetReader | RasterLayer to use as a template. | _required_ | -| `file_path` | str | File path to save to the resulting Raster object.s | _required_ | -| `driver` | (str, options.options.Default is GTiff) | GDAL driver to use to save raster. 
| `'GTiff'` | - -#### Returns - -| Type | Description | -|---------------------------|---------------| +# preprocessing { #pyspatialml.preprocessing } + +`preprocessing` + + + +## Functions + +| Name | Description | +| --- | --- | +| [distance_to_corners](#pyspatialml.preprocessing.distance_to_corners) | Generate buffer distances to corner and centre coordinates of raster | +| [distance_to_samples](#pyspatialml.preprocessing.distance_to_samples) | Generate buffer distances to x,y coordinates. | +| [one_hot_encode](#pyspatialml.preprocessing.one_hot_encode) | One-hot encoding of a RasterLayer. | +| [rotated_coordinates](#pyspatialml.preprocessing.rotated_coordinates) | Generate 2d arrays with n_angles rotated coordinates. | +| [xy_coordinates](#pyspatialml.preprocessing.xy_coordinates) | Fill 2d arrays with their x,y indices. | + +### distance_to_corners { #pyspatialml.preprocessing.distance_to_corners } + +`preprocessing.distance_to_corners(layer, file_path, driver='GTiff')` + +Generate buffer distances to corner and centre coordinates of raster +extent. + +#### Parameters + +| Name | Type | Description | Default | +|-------------|----------------------------------------------------|--------------------------------------------------|------------| +| `layer` | pyspatialml.RasterLayer, or rasterio.DatasetReader | | _required_ | +| `file_path` | str | File path to save to the resulting Raster object | _required_ | +| `driver` | (str, optional.optional.Default is GTiff) | GDAL driver to use to save raster. | `'GTiff'` | + +#### Returns + +| Type | Description | +|---------------------------|---------------| +| pyspatialml.Raster object | | + +### distance_to_samples { #pyspatialml.preprocessing.distance_to_samples } + +`preprocessing.distance_to_samples(layer, file_path, rows, cols, driver='GTiff')` + +Generate buffer distances to x,y coordinates. 
+ +#### Parameters + +| Name | Type | Description | Default | +|-------------|----------------------------------------------------|---------------------------------------------------|------------| +| `layer` | pyspatialml.RasterLayer, or rasterio.DatasetReader | RasterLayer to use as a template. | _required_ | +| `file_path` | str | File path to save to the resulting Raster object. | _required_ | +| `rows` | 1d numpy array | array of row indexes. | _required_ | +| `cols` | 1d numpy array | array of column indexes. | _required_ | +| `driver` | str | GDAL driver to use to save raster. | `'GTiff'` | + +#### Returns + +| Type | Description | +|---------------------------|---------------| +| pyspatialml.Raster object | | + +### one_hot_encode { #pyspatialml.preprocessing.one_hot_encode } + +`preprocessing.one_hot_encode(layer, file_path, categories=None, driver='GTiff')` + +One-hot encoding of a RasterLayer. + +#### Parameters + +| Name | Type | Description | Default | +|--------------|-----------------------------------------|-------------------------------------------------------------------------------------------------------------------------|------------| +| `layer` | pyspatialml.pyspatialml.RasterLayer | Containing categories to perform one-hot encoding on. | _required_ | +| `file_path` | str | File path to save one-hot encoded raster. | _required_ | +| `categories` | (list, ndarray) | Optional list of categories to extract. Default performs one-hot encoding on all categorical values in the input layer. | `None` | +| `driver` | (str, options.options.Default is GTiff) | GDAL-compatible driver. | `'GTiff'` | + +#### Returns + +| Type | Description | +|--------------------------------|--------------------------------------------------------------------| +| pyspatialml.pyspatialml.Raster | Each categorical value is encoded as a layer with a Raster object. 
| + +### rotated_coordinates { #pyspatialml.preprocessing.rotated_coordinates } + +`preprocessing.rotated_coordinates(layer, file_path, n_angles=8, driver='GTiff')` + +Generate 2d arrays with n_angles rotated coordinates. + +#### Parameters + +| Name | Type | Description | Default | +|------------|----------------------------------------------------|--------------------------------------------------|------------| +| `layer` | pyspatialml.RasterLayer, or rasterio.DatasetReader | RasterLayer to use as a template. | _required_ | +| `n_angles` | (int, optional.optional.Default is 8) | Number of angles to rotate coordinate system by. | `8` | +| `driver` | (str, optional.optional.Default is GTiff) | GDAL driver to use to save raster. | `'GTiff'` | + +#### Returns + +| Type | Description | +|--------------------------------|---------------| +| pyspatialml.pyspatialml.Raster | | + +### xy_coordinates { #pyspatialml.preprocessing.xy_coordinates } + +`preprocessing.xy_coordinates(layer, file_path, driver='GTiff')` + +Fill 2d arrays with their x,y indices. + +#### Parameters + +| Name | Type | Description | Default | +|-------------|----------------------------------------------------|----------------------------------------------------|------------| +| `layer` | pyspatialml.RasterLayer, or rasterio.DatasetReader | RasterLayer to use as a template. | _required_ | +| `file_path` | str | File path to save to the resulting Raster object.s | _required_ | +| `driver` | (str, options.options.Default is GTiff) | GDAL driver to use to save raster. 
| `'GTiff'` | + +#### Returns + +| Type | Description | +|---------------------------|---------------| | pyspatialml.Raster object | | \ No newline at end of file diff --git a/reference/vector.qmd b/reference/vector.qmd index 6721953..1118946 100644 --- a/reference/vector.qmd +++ b/reference/vector.qmd @@ -1,51 +1,51 @@ -# vector { #pyspatialml.vector } - -`vector` - - - -## Functions - -| Name | Description | -| --- | --- | -| [filter_points](#pyspatialml.vector.filter_points) | Filter points in geodataframe using a minimum distance buffer. | -| [get_random_point_in_polygon](#pyspatialml.vector.get_random_point_in_polygon) | Generates random shapely Point geometry objects within a single | - -### filter_points { #pyspatialml.vector.filter_points } - -`vector.filter_points(gdf, min_dist=0, remove='first')` - -Filter points in geodataframe using a minimum distance buffer. - -#### Parameters - -| Name | Type | Description | Default | -|------------|-------------------------------------|------------------------------------------------------------------------|------------| -| `gdf` | Geopandas GeoDataFrame | Containing point geometries. | _required_ | -| `min_dist` | (int or float, optional(default=0)) | Minimum distance by which to filter out closely spaced points. | `0` | -| `remove` | (str, optional(default=first)) | Optionally choose to remove 'first' occurrences or 'last' occurrences. | `'first'` | - -#### Returns - -| Type | Description | -|---------------|----------------------------------| -| 2d array-like | Numpy array filtered coordinates | - -### get_random_point_in_polygon { #pyspatialml.vector.get_random_point_in_polygon } - -`vector.get_random_point_in_polygon(poly)` - -Generates random shapely Point geometry objects within a single -shapely Polygon object. 
- -#### Parameters - -| Name | Type | Description | Default | -|--------|------------------------|---------------|------------| -| `poly` | Shapely Polygon object | | _required_ | - -#### Returns - -| Type | Description | -|----------------------|---------------| +# vector { #pyspatialml.vector } + +`vector` + + + +## Functions + +| Name | Description | +| --- | --- | +| [filter_points](#pyspatialml.vector.filter_points) | Filter points in geodataframe using a minimum distance buffer. | +| [get_random_point_in_polygon](#pyspatialml.vector.get_random_point_in_polygon) | Generates random shapely Point geometry objects within a single | + +### filter_points { #pyspatialml.vector.filter_points } + +`vector.filter_points(gdf, min_dist=0, remove='first')` + +Filter points in geodataframe using a minimum distance buffer. + +#### Parameters + +| Name | Type | Description | Default | +|------------|-------------------------------------|------------------------------------------------------------------------|------------| +| `gdf` | Geopandas GeoDataFrame | Containing point geometries. | _required_ | +| `min_dist` | (int or float, optional(default=0)) | Minimum distance by which to filter out closely spaced points. | `0` | +| `remove` | (str, optional(default=first)) | Optionally choose to remove 'first' occurrences or 'last' occurrences. | `'first'` | + +#### Returns + +| Type | Description | +|---------------|----------------------------------| +| 2d array-like | Numpy array filtered coordinates | + +### get_random_point_in_polygon { #pyspatialml.vector.get_random_point_in_polygon } + +`vector.get_random_point_in_polygon(poly)` + +Generates random shapely Point geometry objects within a single +shapely Polygon object. 
+ +#### Parameters + +| Name | Type | Description | Default | +|--------|------------------------|---------------|------------| +| `poly` | Shapely Polygon object | | _required_ | + +#### Returns + +| Type | Description | +|----------------------|---------------| | Shapely Point object | | \ No newline at end of file diff --git a/tests/test_alter.py b/tests/test_alter.py index 1c031d1..de13c9f 100644 --- a/tests/test_alter.py +++ b/tests/test_alter.py @@ -1,26 +1,26 @@ -from unittest import TestCase - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster -import geopandas as gpd -from sklearn.preprocessing import StandardScaler - - -class TestAlter(TestCase): - def setUp(self) -> None: - predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] - self.stack = Raster(predictors) - points = gpd.read_file(nc.points) - data = self.stack.extract_vector(points) - self.data = data.dropna() - - def tearDown(self) -> None: - self.stack.close() - - def test_alter(self): - scaler = StandardScaler() - scaler.fit(self.data.drop(columns=["geometry"]).values) - out = self.stack.alter(scaler) - - self.assertIsInstance(out, Raster) - self.assertEqual(out.shape, self.stack.shape) +from unittest import TestCase + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster +import geopandas as gpd +from sklearn.preprocessing import StandardScaler + + +class TestAlter(TestCase): + def setUp(self) -> None: + predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] + self.stack = Raster(predictors) + points = gpd.read_file(nc.points) + data = self.stack.extract_vector(points) + self.data = data.dropna() + + def tearDown(self) -> None: + self.stack.close() + + def test_alter(self): + scaler = StandardScaler() + scaler.fit(self.data.drop(columns=["geometry"]).values) + out = self.stack.alter(scaler) + + self.assertIsInstance(out, Raster) + self.assertEqual(out.shape, self.stack.shape) diff --git a/tests/test_append.py 
b/tests/test_append.py index 8943135..85cf796 100644 --- a/tests/test_append.py +++ b/tests/test_append.py @@ -1,78 +1,78 @@ -from unittest import TestCase - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestAppend(TestCase): - def setUp(self) -> None: - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, - nc.band7] - - def test_append_inplace(self): - """Append another Raster containing a single layer with identical name - - This test should cause the Raster object to automatically rename the - duplicated names as "lsat7_2000_70_1", "lsat7_2000_70_2", etc. - - Appending a multi-band raster should result in a new layer with the - multi-band name "landsat_multiband_1", "landsat_multiband_2", etc. - - A list of Rasters can be passed to append() to append multiple rasters - """ - # append a single band raster with the same name - stack = Raster(self.predictors) - band7_mean = stack["lsat7_2000_70"].read(masked=True).mean() - stack.append(Raster(nc.band7), in_place=True) - - self.assertEqual(list(stack.names)[5], "lsat7_2000_70_1") - self.assertEqual(list(stack.names)[-1], "lsat7_2000_70_2") - self.assertEqual( - stack.lsat7_2000_70_1.read(masked=True).mean(), - stack.lsat7_2000_70_2.read(masked=True).mean(), - band7_mean, - ) - - # append a multiband raster - stack = Raster(self.predictors) - stack.append(Raster(nc.multiband), in_place=True) - self.assertEqual(list(stack.names)[6], "landsat_multiband_1") - stack.close() - - # append multiple rasters - stack = Raster(self.predictors) - stack.append([Raster(nc.band5), Raster(nc.band7)], in_place=True) - self.assertEqual(stack.count, 8) - - def test_append_with_copy(self): - """Same tests as above but create a new Raster rather than append - in place - """ - # append another Raster containing a single layer with identical name - stack = Raster(self.predictors) - band7_mean = stack["lsat7_2000_70"].read(masked=True).mean() - result = stack.append(Raster(nc.band7), 
in_place=False) - - # check that original is untouched - self.assertEqual(stack.count, 6) - - # check that result contains appended raster - self.assertEqual(list(result.names)[5], "lsat7_2000_70_1") - self.assertEqual(list(result.names)[-1], "lsat7_2000_70_2") - - # check that band 7 stats are the same after appending - self.assertEqual( - result.lsat7_2000_70_1.read(masked=True).mean(), - result.lsat7_2000_70_2.read(masked=True).mean(), - band7_mean, - ) - - # append a multiband raster - result = stack.append(Raster(nc.multiband), in_place=False) - self.assertEqual(list(result.names)[6], "landsat_multiband_1") - stack.close() - - # append multiple rasters - stack = Raster(self.predictors) - new_stack = stack.append([Raster(nc.band5), Raster(nc.band7)], in_place=False) - self.assertEqual(new_stack.count, 8) +from unittest import TestCase + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestAppend(TestCase): + def setUp(self) -> None: + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, + nc.band7] + + def test_append_inplace(self): + """Append another Raster containing a single layer with identical name + + This test should cause the Raster object to automatically rename the + duplicated names as "lsat7_2000_70_1", "lsat7_2000_70_2", etc. + + Appending a multi-band raster should result in a new layer with the + multi-band name "landsat_multiband_1", "landsat_multiband_2", etc. 
+ + A list of Rasters can be passed to append() to append multiple rasters + """ + # append a single band raster with the same name + stack = Raster(self.predictors) + band7_mean = stack["lsat7_2000_70"].read(masked=True).mean() + stack.append(Raster(nc.band7), in_place=True) + + self.assertEqual(list(stack.names)[5], "lsat7_2000_70_1") + self.assertEqual(list(stack.names)[-1], "lsat7_2000_70_2") + self.assertEqual( + stack.lsat7_2000_70_1.read(masked=True).mean(), + stack.lsat7_2000_70_2.read(masked=True).mean(), + band7_mean, + ) + + # append a multiband raster + stack = Raster(self.predictors) + stack.append(Raster(nc.multiband), in_place=True) + self.assertEqual(list(stack.names)[6], "landsat_multiband_1") + stack.close() + + # append multiple rasters + stack = Raster(self.predictors) + stack.append([Raster(nc.band5), Raster(nc.band7)], in_place=True) + self.assertEqual(stack.count, 8) + + def test_append_with_copy(self): + """Same tests as above but create a new Raster rather than append + in place + """ + # append another Raster containing a single layer with identical name + stack = Raster(self.predictors) + band7_mean = stack["lsat7_2000_70"].read(masked=True).mean() + result = stack.append(Raster(nc.band7), in_place=False) + + # check that original is untouched + self.assertEqual(stack.count, 6) + + # check that result contains appended raster + self.assertEqual(list(result.names)[5], "lsat7_2000_70_1") + self.assertEqual(list(result.names)[-1], "lsat7_2000_70_2") + + # check that band 7 stats are the same after appending + self.assertEqual( + result.lsat7_2000_70_1.read(masked=True).mean(), + result.lsat7_2000_70_2.read(masked=True).mean(), + band7_mean, + ) + + # append a multiband raster + result = stack.append(Raster(nc.multiband), in_place=False) + self.assertEqual(list(result.names)[6], "landsat_multiband_1") + stack.close() + + # append multiple rasters + stack = Raster(self.predictors) + new_stack = stack.append([Raster(nc.band5), 
Raster(nc.band7)], in_place=False) + self.assertEqual(new_stack.count, 8) diff --git a/tests/test_apply.py b/tests/test_apply.py index f7d6145..4a971ea 100644 --- a/tests/test_apply.py +++ b/tests/test_apply.py @@ -1,65 +1,65 @@ -from unittest import TestCase - -import numpy as np - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestCalc(TestCase): - def setUp(self) -> None: - predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] - self.stack = Raster(predictors) - self.result = None - - def tearDown(self) -> None: - self.stack.close() - self.result.close() - self.stack = None - self.result = None - - def test_calc_with_2d_output(self): - def compute_outputs_2d_array(arr): - return arr[0, :, :] + arr[1, :, :] - - self.result = self.stack.apply(compute_outputs_2d_array) - self.assertIsInstance(self.result, Raster) - self.assertEqual(self.result.count, 1) - self.assertEqual(self.result.read(masked=True).count(), 183418) - - def test_calc_with_2d_output_coerce_dtype(self): - def compute_outputs_2d_array(arr): - return arr[0, :, :] + arr[1, :, :] - - self.result = self.stack.apply(compute_outputs_2d_array, dtype=np.int16) - self.assertIsInstance(self.result, Raster) - self.assertEqual(self.result.count, 1) - self.assertEqual(self.result.read(masked=True).count(), 183418) - - def test_calc_with_3d_output(self): - def compute_outputs_3d_array(arr): - arr[0, :, :] = arr[0, :, :] + arr[1, ::] - return arr - - self.result = self.stack.apply(compute_outputs_3d_array) - self.assertIsInstance(self.result, Raster) - self.assertEqual(self.result.count, 6) - self.assertEqual(self.result.read(masked=True).count(), 1052182) - - def test_calc_with_multiprocessing(self): - def compute_outputs_2d_array(arr): - return arr[0, :, :] + arr[1, :, :] - - self.result = self.stack.apply(compute_outputs_2d_array) - self.assertIsInstance(self.result, Raster) - self.assertEqual(self.result.count, 1) - 
self.assertEqual(self.result.read(masked=True).count(), 183418) - - def test_calc_in_memory(self): - def compute_outputs_2d_array(arr): - return arr[0, :, :] + arr[1, :, :] - - self.result = self.stack.apply(compute_outputs_2d_array, in_memory=True) - self.assertIsInstance(self.result, Raster) - self.assertEqual(self.result.count, 1) - self.assertEqual(self.result.read(masked=True).count(), 183418) +from unittest import TestCase + +import numpy as np + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestCalc(TestCase): + def setUp(self) -> None: + predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] + self.stack = Raster(predictors) + self.result = None + + def tearDown(self) -> None: + self.stack.close() + self.result.close() + self.stack = None + self.result = None + + def test_calc_with_2d_output(self): + def compute_outputs_2d_array(arr): + return arr[0, :, :] + arr[1, :, :] + + self.result = self.stack.apply(compute_outputs_2d_array) + self.assertIsInstance(self.result, Raster) + self.assertEqual(self.result.count, 1) + self.assertEqual(self.result.read(masked=True).count(), 183418) + + def test_calc_with_2d_output_coerce_dtype(self): + def compute_outputs_2d_array(arr): + return arr[0, :, :] + arr[1, :, :] + + self.result = self.stack.apply(compute_outputs_2d_array, dtype=np.int16) + self.assertIsInstance(self.result, Raster) + self.assertEqual(self.result.count, 1) + self.assertEqual(self.result.read(masked=True).count(), 183418) + + def test_calc_with_3d_output(self): + def compute_outputs_3d_array(arr): + arr[0, :, :] = arr[0, :, :] + arr[1, ::] + return arr + + self.result = self.stack.apply(compute_outputs_3d_array) + self.assertIsInstance(self.result, Raster) + self.assertEqual(self.result.count, 6) + self.assertEqual(self.result.read(masked=True).count(), 1052182) + + def test_calc_with_multiprocessing(self): + def compute_outputs_2d_array(arr): + return arr[0, :, :] + arr[1, :, :] + + self.result = 
self.stack.apply(compute_outputs_2d_array) + self.assertIsInstance(self.result, Raster) + self.assertEqual(self.result.count, 1) + self.assertEqual(self.result.read(masked=True).count(), 183418) + + def test_calc_in_memory(self): + def compute_outputs_2d_array(arr): + return arr[0, :, :] + arr[1, :, :] + + self.result = self.stack.apply(compute_outputs_2d_array, in_memory=True) + self.assertIsInstance(self.result, Raster) + self.assertEqual(self.result.count, 1) + self.assertEqual(self.result.read(masked=True).count(), 183418) diff --git a/tests/test_band_math.py b/tests/test_band_math.py index 7e4b305..87b20b6 100644 --- a/tests/test_band_math.py +++ b/tests/test_band_math.py @@ -1,33 +1,33 @@ -import numpy as np -import unittest -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestArith(unittest.TestCase): - def setUp(self) -> None: - arr = np.zeros((3, 100, 100)) - arr[1,:,:] = 1 - arr[2,:,:] = 2 - self.obj = Raster(arr) - self.obj.names = ["band1", "band2", "band3"] - - def test_scalar(self): - addition = self.obj.iloc[0] + 100 - self.assertEqual(addition.min(), 100) - - division = addition / 10 - self.assertEqual(division.min(), 10.0) - - multiplication = self.obj.iloc[1] * 100 - self.assertEqual(multiplication.min(), 100) - - def test_rasterlayer(self): - addition = self.obj.iloc[0] + self.obj.iloc[1] - self.assertEqual(addition.min(), 1.0) - - multiplication = self.obj.iloc[1] * self.obj.iloc[2] - self.assertEqual(multiplication.min(), 2) - - def test_raster(self): - pass +import numpy as np +import unittest +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestArith(unittest.TestCase): + def setUp(self) -> None: + arr = np.zeros((3, 100, 100)) + arr[1,:,:] = 1 + arr[2,:,:] = 2 + self.obj = Raster(arr) + self.obj.names = ["band1", "band2", "band3"] + + def test_scalar(self): + addition = self.obj.iloc[0] + 100 + self.assertEqual(addition.min(), 100) + + division = addition / 10 + 
self.assertEqual(division.min(), 10.0) + + multiplication = self.obj.iloc[1] * 100 + self.assertEqual(multiplication.min(), 100) + + def test_rasterlayer(self): + addition = self.obj.iloc[0] + self.obj.iloc[1] + self.assertEqual(addition.min(), 1.0) + + multiplication = self.obj.iloc[1] * self.obj.iloc[2] + self.assertEqual(multiplication.min(), 2) + + def test_raster(self): + pass diff --git a/tests/test_band_names.py b/tests/test_band_names.py index e7c8bc5..66ba246 100644 --- a/tests/test_band_names.py +++ b/tests/test_band_names.py @@ -1,53 +1,53 @@ -import os -from unittest import TestCase - -import rasterio -import tempfile -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestNames(TestCase): - """Test the initiation of a Raster object when the file raster dataset - contains band names - """ - def setUp(self) -> None: - """Create a temporary file with a raster dataset with band names - """ - with rasterio.open(nc.multiband) as src: - self.descriptions = ["band_1", "band_2", "band_3", "band_4", "band_5"] - - self.fp = tempfile.NamedTemporaryFile(suffix=".tif").name - - with rasterio.open(self.fp, "w", **src.meta) as dst: - for i, band in enumerate(self.descriptions, start=1): - dst.write_band(i, src.read(i)) - dst.set_band_description(i, band) - - def tearDown(self) -> None: - os.remove(self.fp) - - def test_names_from_file(self) -> None: - """Test the initiation of a Raster object from a file when the file raster - dataset contains band descriptions""" - r = Raster(self.fp) - self.assertEqual(list(r.names), self.descriptions) - - def test_names_from_rasterio(self) -> None: - """Test the initiation of a Raster object from a rasterio.DatasetReader - object when the file raster dataset has band descriptions - """ - with rasterio.open(self.fp) as src: - r = Raster(src) - self.assertEqual(list(r.names), self.descriptions) - - def test_names_subsetting(self) -> None: - """Test that the names of the bands are preserved when subsetting 
a raster - """ - r = Raster(self.fp) - subset = r.iloc[[0, 1]] - self.assertEqual(list(subset.names), self.descriptions[0:2]) - - new = r.copy(["band_1", "band_2"]) - new["band_3"] = r["band_3"] - self.assertEqual(list(new.names), self.descriptions[0:3]) +import os +from unittest import TestCase + +import rasterio +import tempfile +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestNames(TestCase): + """Test the initiation of a Raster object when the file raster dataset + contains band names + """ + def setUp(self) -> None: + """Create a temporary file with a raster dataset with band names + """ + with rasterio.open(nc.multiband) as src: + self.descriptions = ["band_1", "band_2", "band_3", "band_4", "band_5"] + + self.fp = tempfile.NamedTemporaryFile(suffix=".tif").name + + with rasterio.open(self.fp, "w", **src.meta) as dst: + for i, band in enumerate(self.descriptions, start=1): + dst.write_band(i, src.read(i)) + dst.set_band_description(i, band) + + def tearDown(self) -> None: + os.remove(self.fp) + + def test_names_from_file(self) -> None: + """Test the initiation of a Raster object from a file when the file raster + dataset contains band descriptions""" + r = Raster(self.fp) + self.assertEqual(list(r.names), self.descriptions) + + def test_names_from_rasterio(self) -> None: + """Test the initiation of a Raster object from a rasterio.DatasetReader + object when the file raster dataset has band descriptions + """ + with rasterio.open(self.fp) as src: + r = Raster(src) + self.assertEqual(list(r.names), self.descriptions) + + def test_names_subsetting(self) -> None: + """Test that the names of the bands are preserved when subsetting a raster + """ + r = Raster(self.fp) + subset = r.iloc[[0, 1]] + self.assertEqual(list(subset.names), self.descriptions[0:2]) + + new = r.copy(["band_1", "band_2"]) + new["band_3"] = r["band_3"] + self.assertEqual(list(new.names), self.descriptions[0:3]) diff --git a/tests/test_crop.py 
b/tests/test_crop.py index 5315307..53dea96 100644 --- a/tests/test_crop.py +++ b/tests/test_crop.py @@ -1,39 +1,39 @@ -from unittest import TestCase - -import geopandas as gpd - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestToCrs(TestCase): - def setUp(self) -> None: - # inputs - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, - nc.band7] - self.stack = Raster(self.predictors) - training_py = gpd.read_file(nc.polygons) - self.crop_bounds = training_py.loc[0, "geometry"].bounds - - # outputs - self.cropped = None - - def tearDown(self) -> None: - self.stack.close() - self.cropped.close() - - def test_crop_defaults(self): - self.cropped = self.stack.crop(self.crop_bounds) - - # check raster object - self.assertIsInstance(self.cropped, Raster) - self.assertEqual(self.cropped.count, self.stack.count) - self.assertEqual(self.cropped.read(masked=True).count(), 1440) - - # test nodata value is recognized - self.assertEqual(self.cropped.read(masked=True).min(), 35.0) - self.assertEqual(self.cropped.read(masked=True).max(), 168.0) - - def test_crop_in_memory(self): - self.cropped = self.stack.crop(self.crop_bounds, in_memory=True) - self.assertIsInstance(self.cropped, Raster) +from unittest import TestCase + +import geopandas as gpd + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestToCrs(TestCase): + def setUp(self) -> None: + # inputs + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, + nc.band7] + self.stack = Raster(self.predictors) + training_py = gpd.read_file(nc.polygons) + self.crop_bounds = training_py.loc[0, "geometry"].bounds + + # outputs + self.cropped = None + + def tearDown(self) -> None: + self.stack.close() + self.cropped.close() + + def test_crop_defaults(self): + self.cropped = self.stack.crop(self.crop_bounds) + + # check raster object + self.assertIsInstance(self.cropped, Raster) + self.assertEqual(self.cropped.count, self.stack.count) + 
self.assertEqual(self.cropped.read(masked=True).count(), 1440) + + # test nodata value is recognized + self.assertEqual(self.cropped.read(masked=True).min(), 35.0) + self.assertEqual(self.cropped.read(masked=True).max(), 168.0) + + def test_crop_in_memory(self): + self.cropped = self.stack.crop(self.crop_bounds, in_memory=True) + self.assertIsInstance(self.cropped, Raster) diff --git a/tests/test_drop.py b/tests/test_drop.py index a18401c..ff1a5f4 100644 --- a/tests/test_drop.py +++ b/tests/test_drop.py @@ -1,59 +1,59 @@ -from unittest import TestCase - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestDrop(TestCase): - def setUp(self) -> None: - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, - nc.band7] - - def test_drop_inplace(self): - stack = Raster(self.predictors) - stack.drop(labels="lsat7_2000_50", in_place=True) - - # check that Raster object is returned - self.assertIsInstance(stack, Raster) - - # check that RasterLayer has been dropped - self.assertEqual(stack.count, 5) - self.assertNotIn("lsat7_2000_50", stack.names) - stack.close() - - def test_drop_with_copy(self): - stack = Raster(self.predictors) - names = stack.names - result = stack.drop(labels="lsat7_2000_50", in_place=False) - - # check that Raster object is returned - self.assertIsInstance(result, Raster) - - # check that RasterLayer has been dropped - self.assertEqual(result.count, 5) - self.assertNotIn("lsat7_2000_50", result.names) - - # check that original raster is unaffected - self.assertEqual(stack.count, 6) - self.assertEqual(stack.names, names) - stack.close() - result.close() - - def test_drop_in_memory(self): - stack = Raster(self.predictors) - names = stack.names - - result = stack.intersect(in_memory=True) - result = stack.drop(labels="lsat7_2000_50", in_place=False) - - # check that Raster object is returned - self.assertIsInstance(result, Raster) - - # check that RasterLayer has been dropped - self.assertEqual(result.count, 
5) - self.assertNotIn("lsat7_2000_50", result.names) - - # check that original raster is unaffected - self.assertEqual(stack.count, 6) - self.assertEqual(stack.names, names) - stack.close() +from unittest import TestCase + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestDrop(TestCase): + def setUp(self) -> None: + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, + nc.band7] + + def test_drop_inplace(self): + stack = Raster(self.predictors) + stack.drop(labels="lsat7_2000_50", in_place=True) + + # check that Raster object is returned + self.assertIsInstance(stack, Raster) + + # check that RasterLayer has been dropped + self.assertEqual(stack.count, 5) + self.assertNotIn("lsat7_2000_50", stack.names) + stack.close() + + def test_drop_with_copy(self): + stack = Raster(self.predictors) + names = stack.names + result = stack.drop(labels="lsat7_2000_50", in_place=False) + + # check that Raster object is returned + self.assertIsInstance(result, Raster) + + # check that RasterLayer has been dropped + self.assertEqual(result.count, 5) + self.assertNotIn("lsat7_2000_50", result.names) + + # check that original raster is unaffected + self.assertEqual(stack.count, 6) + self.assertEqual(stack.names, names) + stack.close() + result.close() + + def test_drop_in_memory(self): + stack = Raster(self.predictors) + names = stack.names + + result = stack.intersect(in_memory=True) + result = stack.drop(labels="lsat7_2000_50", in_place=False) + + # check that Raster object is returned + self.assertIsInstance(result, Raster) + + # check that RasterLayer has been dropped + self.assertEqual(result.count, 5) + self.assertNotIn("lsat7_2000_50", result.names) + + # check that original raster is unaffected + self.assertEqual(stack.count, 6) + self.assertEqual(stack.names, names) + stack.close() diff --git a/tests/test_extract.py b/tests/test_extract.py index 80a35c2..8f77f35 100644 --- a/tests/test_extract.py +++ b/tests/test_extract.py 
@@ -1,130 +1,130 @@ -from copy import deepcopy -from unittest import TestCase - -import geopandas -import pandas as pd -import rasterio - -from pyspatialml import Raster -from pyspatialml.datasets import nc - - -class TestExtract(TestCase): - def setUp(self) -> None: - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] - self.extracted_grass = pd.read_table(nc.extracted_pixels, delimiter=" ") - - self.stack = Raster(self.predictors) - - def tearDown(self) -> None: - self.stack.close() - - def test_extract_points(self): - training_pt = geopandas.read_file(nc.points) - - # check that extracted training data as a DataFrame match known values - df = self.stack.extract_vector(gdf=training_pt) - df = df.dropna() - training_pt = training_pt.dropna() - - self.assertTrue( - (df["lsat7_2000_10"].values == training_pt["b1"].values).all() - ) - self.assertTrue( - (df["lsat7_2000_20"].values == training_pt["b2"].values).all() - ) - self.assertTrue( - (df["lsat7_2000_30"].values == training_pt["b3"].values).all() - ) - self.assertTrue( - (df["lsat7_2000_40"].values == training_pt["b4"].values).all() - ) - self.assertTrue( - (df["lsat7_2000_50"].values == training_pt["b5"].values).all() - ) - self.assertTrue( - (df["lsat7_2000_70"].values == training_pt["b7"].values).all() - ) - - def test_extract_polygons(self): - # extract training data from polygons - training_py = geopandas.read_file(nc.polygons) - df = self.stack.extract_vector(gdf=training_py) - df = df.dropna() - - df = df.merge( - right=training_py.loc[:, ("id", "label")], - left_on="geometry_idx", - right_on="index", - right_index=True, - ) - - # compare to extracted data using GRASS GIS - self.assertEqual(df.shape[0], self.extracted_grass.shape[0]) - self.assertAlmostEqual( - df["lsat7_2000_10"].mean(), self.extracted_grass["b1"].mean(), - places=2 - ) - self.assertAlmostEqual( - df["lsat7_2000_20"].mean(), self.extracted_grass["b2"].mean(), - places=2 - ) - self.assertAlmostEqual( - 
df["lsat7_2000_30"].mean(), self.extracted_grass["b3"].mean(), - places=2 - ) - self.assertAlmostEqual( - df["lsat7_2000_40"].mean(), self.extracted_grass["b4"].mean(), - places=2 - ) - self.assertAlmostEqual( - df["lsat7_2000_50"].mean(), self.extracted_grass["b5"].mean(), - places=2 - ) - self.assertAlmostEqual( - df["lsat7_2000_70"].mean(), self.extracted_grass["b7"].mean(), - places=2 - ) - - def test_extract_lines(self): - # extract training data from lines - training_py = geopandas.read_file(nc.polygons) - training_lines = deepcopy(training_py) - training_lines["geometry"] = training_lines.geometry.boundary - df = self.stack.extract_vector(gdf=training_lines).dropna() - - # check shapes of extracted pixels - self.assertEqual(df.shape[0], 948) - - def test_extract_raster(self): - # extract training data from labelled pixels - with rasterio.open(nc.labelled_pixels) as src: - df = self.stack.extract_raster(src) - - df = df.dropna() - - self.assertEqual(df.shape[0], self.extracted_grass.shape[0]) - self.assertAlmostEqual( - df["lsat7_2000_10"].mean(), self.extracted_grass["b1"].mean(), - places=3 - ) - self.assertAlmostEqual( - df["lsat7_2000_20"].mean(), self.extracted_grass["b2"].mean(), - places=3 - ) - self.assertAlmostEqual( - df["lsat7_2000_30"].mean(), self.extracted_grass["b3"].mean(), - places=3 - ) - self.assertAlmostEqual( - df["lsat7_2000_40"].mean(), self.extracted_grass["b4"].mean(), - places=3 - ) - self.assertAlmostEqual( - df["lsat7_2000_50"].mean(), self.extracted_grass["b5"].mean(), - places=3 - ) - self.assertAlmostEqual( - df["lsat7_2000_70"].mean(), self.extracted_grass["b7"].mean(), - places=3 - ) +from copy import deepcopy +from unittest import TestCase + +import geopandas +import pandas as pd +import rasterio + +from pyspatialml import Raster +from pyspatialml.datasets import nc + + +class TestExtract(TestCase): + def setUp(self) -> None: + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] + 
self.extracted_grass = pd.read_table(nc.extracted_pixels, delimiter=" ") + + self.stack = Raster(self.predictors) + + def tearDown(self) -> None: + self.stack.close() + + def test_extract_points(self): + training_pt = geopandas.read_file(nc.points) + + # check that extracted training data as a DataFrame match known values + df = self.stack.extract_vector(gdf=training_pt) + df = df.dropna() + training_pt = training_pt.dropna() + + self.assertTrue( + (df["lsat7_2000_10"].values == training_pt["b1"].values).all() + ) + self.assertTrue( + (df["lsat7_2000_20"].values == training_pt["b2"].values).all() + ) + self.assertTrue( + (df["lsat7_2000_30"].values == training_pt["b3"].values).all() + ) + self.assertTrue( + (df["lsat7_2000_40"].values == training_pt["b4"].values).all() + ) + self.assertTrue( + (df["lsat7_2000_50"].values == training_pt["b5"].values).all() + ) + self.assertTrue( + (df["lsat7_2000_70"].values == training_pt["b7"].values).all() + ) + + def test_extract_polygons(self): + # extract training data from polygons + training_py = geopandas.read_file(nc.polygons) + df = self.stack.extract_vector(gdf=training_py) + df = df.dropna() + + df = df.merge( + right=training_py.loc[:, ("id", "label")], + left_on="geometry_idx", + right_on="index", + right_index=True, + ) + + # compare to extracted data using GRASS GIS + self.assertEqual(df.shape[0], self.extracted_grass.shape[0]) + self.assertAlmostEqual( + df["lsat7_2000_10"].mean(), self.extracted_grass["b1"].mean(), + places=2 + ) + self.assertAlmostEqual( + df["lsat7_2000_20"].mean(), self.extracted_grass["b2"].mean(), + places=2 + ) + self.assertAlmostEqual( + df["lsat7_2000_30"].mean(), self.extracted_grass["b3"].mean(), + places=2 + ) + self.assertAlmostEqual( + df["lsat7_2000_40"].mean(), self.extracted_grass["b4"].mean(), + places=2 + ) + self.assertAlmostEqual( + df["lsat7_2000_50"].mean(), self.extracted_grass["b5"].mean(), + places=2 + ) + self.assertAlmostEqual( + df["lsat7_2000_70"].mean(), 
self.extracted_grass["b7"].mean(), + places=2 + ) + + def test_extract_lines(self): + # extract training data from lines + training_py = geopandas.read_file(nc.polygons) + training_lines = deepcopy(training_py) + training_lines["geometry"] = training_lines.geometry.boundary + df = self.stack.extract_vector(gdf=training_lines).dropna() + + # check shapes of extracted pixels + self.assertEqual(df.shape[0], 948) + + def test_extract_raster(self): + # extract training data from labelled pixels + with rasterio.open(nc.labelled_pixels) as src: + df = self.stack.extract_raster(src) + + df = df.dropna() + + self.assertEqual(df.shape[0], self.extracted_grass.shape[0]) + self.assertAlmostEqual( + df["lsat7_2000_10"].mean(), self.extracted_grass["b1"].mean(), + places=3 + ) + self.assertAlmostEqual( + df["lsat7_2000_20"].mean(), self.extracted_grass["b2"].mean(), + places=3 + ) + self.assertAlmostEqual( + df["lsat7_2000_30"].mean(), self.extracted_grass["b3"].mean(), + places=3 + ) + self.assertAlmostEqual( + df["lsat7_2000_40"].mean(), self.extracted_grass["b4"].mean(), + places=3 + ) + self.assertAlmostEqual( + df["lsat7_2000_50"].mean(), self.extracted_grass["b5"].mean(), + places=3 + ) + self.assertAlmostEqual( + df["lsat7_2000_70"].mean(), self.extracted_grass["b7"].mean(), + places=3 + ) diff --git a/tests/test_indexing.py b/tests/test_indexing.py index e9462c1..73a05d3 100644 --- a/tests/test_indexing.py +++ b/tests/test_indexing.py @@ -1,100 +1,100 @@ -from unittest import TestCase - -from pyspatialml import Raster, RasterLayer -from pyspatialml.datasets import nc - - -class TestIndexing(TestCase): - def setUp(self) -> None: - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] - - def test_naming(self): - stack = Raster(self.predictors + [nc.multiband]) - - # check unique naming when stacking multiband raster - self.assertEqual(stack.count, 11) - expected_names = [ - "lsat7_2000_10", - "lsat7_2000_20", - "lsat7_2000_30", - "lsat7_2000_40", 
- "lsat7_2000_50", - "lsat7_2000_70", - "landsat_multiband_1", - "landsat_multiband_2", - "landsat_multiband_3", - "landsat_multiband_4", - "landsat_multiband_5", - ] - self.assertListEqual(list(stack.names), expected_names) - stack.close() - - def test_subset_single_layer(self): - stack = Raster(self.predictors + [nc.multiband]) - - # Subset a single layer using an index position - returns a RasterLayer - self.assertIsInstance(stack.iloc[0], RasterLayer) - - # Subset a single layer using a label - returns a RasterLayer - self.assertIsInstance(stack["lsat7_2000_10"], RasterLayer) - - # Subset a single layer using an attribute - returns a RasterLayer - self.assertIsInstance(stack.lsat7_2000_10, RasterLayer) - - # Check that the raster values are the same as the original values - # after subsetting - self.assertEqual( - stack.lsat7_2000_10.read(masked=True).mean(), - 80.56715262406088 - ) - self.assertEqual( - stack.lsat7_2000_70.read(masked=True).mean(), - 59.17773813401238 - ) - stack.close() - - def test_subset_multiple_layers(self): - stack = Raster(self.predictors + [nc.multiband]) - - # Subset multiple layers using a slice of index positions - # - returns a Raster object - self.assertIsInstance(stack.iloc[0:2], Raster) - - # Subset multiple layers using a list of index positions - # - returns a Raster object - self.assertIsInstance(stack.iloc[[0, 1, 2]], Raster) - - # Subset multiple layers using a list of labels - # - returns a Raster object - subset_raster = stack[["lsat7_2000_10", "lsat7_2000_70"]] - self.assertIsInstance(subset_raster, Raster) - self.assertListEqual( - list(subset_raster.names), - ["lsat7_2000_10", "lsat7_2000_70"] - ) - - # Check that label and integer subset return the same layers - self.assertListEqual( - list(stack.iloc[0:3].names), - list(stack[["lsat7_2000_10", "lsat7_2000_20", "lsat7_2000_30"]].names), - ) - - stack.close() - - def test_indexing(self): - stack = Raster(self.predictors + [nc.multiband]) - - # replace band 1 with band 
7 - band7_mean = stack["lsat7_2000_70"].read(masked=True).mean() - - stack.iloc[0] = Raster(nc.band7).iloc[0] - - self.assertEqual(stack.iloc[0].read(masked=True).mean(), band7_mean) - self.assertEqual(stack["lsat7_2000_10"].read(masked=True).mean(), - band7_mean) - self.assertEqual(stack["lsat7_2000_10"].read(masked=True).mean(), - band7_mean) - self.assertEqual(stack.lsat7_2000_10.read(masked=True).mean(), - band7_mean) - - stack.close() +from unittest import TestCase + +from pyspatialml import Raster, RasterLayer +from pyspatialml.datasets import nc + + +class TestIndexing(TestCase): + def setUp(self) -> None: + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] + + def test_naming(self): + stack = Raster(self.predictors + [nc.multiband]) + + # check unique naming when stacking multiband raster + self.assertEqual(stack.count, 11) + expected_names = [ + "lsat7_2000_10", + "lsat7_2000_20", + "lsat7_2000_30", + "lsat7_2000_40", + "lsat7_2000_50", + "lsat7_2000_70", + "landsat_multiband_1", + "landsat_multiband_2", + "landsat_multiband_3", + "landsat_multiband_4", + "landsat_multiband_5", + ] + self.assertListEqual(list(stack.names), expected_names) + stack.close() + + def test_subset_single_layer(self): + stack = Raster(self.predictors + [nc.multiband]) + + # Subset a single layer using an index position - returns a RasterLayer + self.assertIsInstance(stack.iloc[0], RasterLayer) + + # Subset a single layer using a label - returns a RasterLayer + self.assertIsInstance(stack["lsat7_2000_10"], RasterLayer) + + # Subset a single layer using an attribute - returns a RasterLayer + self.assertIsInstance(stack.lsat7_2000_10, RasterLayer) + + # Check that the raster values are the same as the original values + # after subsetting + self.assertEqual( + stack.lsat7_2000_10.read(masked=True).mean(), + 80.56715262406088 + ) + self.assertEqual( + stack.lsat7_2000_70.read(masked=True).mean(), + 59.17773813401238 + ) + stack.close() + + def 
test_subset_multiple_layers(self): + stack = Raster(self.predictors + [nc.multiband]) + + # Subset multiple layers using a slice of index positions + # - returns a Raster object + self.assertIsInstance(stack.iloc[0:2], Raster) + + # Subset multiple layers using a list of index positions + # - returns a Raster object + self.assertIsInstance(stack.iloc[[0, 1, 2]], Raster) + + # Subset multiple layers using a list of labels + # - returns a Raster object + subset_raster = stack[["lsat7_2000_10", "lsat7_2000_70"]] + self.assertIsInstance(subset_raster, Raster) + self.assertListEqual( + list(subset_raster.names), + ["lsat7_2000_10", "lsat7_2000_70"] + ) + + # Check that label and integer subset return the same layers + self.assertListEqual( + list(stack.iloc[0:3].names), + list(stack[["lsat7_2000_10", "lsat7_2000_20", "lsat7_2000_30"]].names), + ) + + stack.close() + + def test_indexing(self): + stack = Raster(self.predictors + [nc.multiband]) + + # replace band 1 with band 7 + band7_mean = stack["lsat7_2000_70"].read(masked=True).mean() + + stack.iloc[0] = Raster(nc.band7).iloc[0] + + self.assertEqual(stack.iloc[0].read(masked=True).mean(), band7_mean) + self.assertEqual(stack["lsat7_2000_10"].read(masked=True).mean(), + band7_mean) + self.assertEqual(stack["lsat7_2000_10"].read(masked=True).mean(), + band7_mean) + self.assertEqual(stack.lsat7_2000_10.read(masked=True).mean(), + band7_mean) + + stack.close() diff --git a/tests/test_initiation.py b/tests/test_initiation.py index 41cc13d..3848d3f 100644 --- a/tests/test_initiation.py +++ b/tests/test_initiation.py @@ -1,108 +1,108 @@ -from unittest import TestCase - -import os -import rasterio -import numpy as np - -from pyspatialml import Raster, RasterLayer -from pyspatialml.datasets import nc - - -class TestInit(TestCase): - def setUp(self) -> None: - # inputs - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, - nc.band7] - - # test results - self.stack = None - - def tearDown(self) -> None: - 
self.stack.close() - - def test_initiation_files(self): - # test init from list of file paths - self.stack = Raster(self.predictors) - self.assertIsInstance(self.stack, Raster) - self.assertEqual(self.stack.count, 6) - - def test_initiation_file(self): - # test init from single file path - self.stack = Raster(nc.band1) - self.assertIsInstance(self.stack, Raster) - self.assertEqual(self.stack.count, 1) - - def test_initiation_datasetreader(self): - # test init from single rasterio.io.datasetreader - with rasterio.open(nc.band1) as src: - self.stack = Raster(src) - self.assertIsInstance(self.stack, Raster) - self.assertEqual(self.stack.count, 1) - - def test_initiation_list_datasetreader(self): - # test init from list of rasterio.io.datasetreader objects - srcs = [] - for f in self.predictors: - srcs.append(rasterio.open(f)) - self.stack = Raster(srcs) - self.assertIsInstance(self.stack, Raster) - self.assertEqual(self.stack.count, 6) - - def test_initiation_band(self): - # test init from single rasterio.band object - with rasterio.open(nc.band1) as src: - band = rasterio.band(src, 1) - self.stack = Raster(band) - self.assertIsInstance(self.stack, Raster) - self.assertEqual(self.stack.count, 1) - - def test_initiation_list_bands(self): - # test init from list of rasterio.band objects - bands = [] - for f in self.predictors: - src = rasterio.open(f) - bands.append(rasterio.band(src, 1)) - self.stack = Raster(bands) - self.assertIsInstance(self.stack, Raster) - self.assertEqual(self.stack.count, 6) - - def test_initiation_rasterlayer(self): - # test init from a single RasterLayer object - with rasterio.open(nc.band1) as src: - band = rasterio.band(src, 1) - layer = RasterLayer(band) - self.stack = Raster(layer) - self.assertIsInstance(self.stack, Raster) - self.assertEqual(self.stack.count, 1) - - def test_initiation_list_rasterlayer(self): - # test init from a list of RasterLayer objects - layers = [] - for f in self.predictors: - src = rasterio.open(f) - band = 
rasterio.band(src, 1) - layers.append(RasterLayer(band)) - self.stack = Raster(layers) - self.assertIsInstance(self.stack, Raster) - self.assertEqual(self.stack.count, 6) - - def test_initiation_array(self): - # check initiation of single-band raster from file - arr = np.zeros((100, 100)) - self.stack = Raster(arr) - - # check output is written to tempfile - self.assertTrue(os.path.exists(self.stack.iloc[0].file)) - - # check some operations on the created raster - layer_name = list(self.stack.names)[0] - self.stack = self.stack.rename({layer_name: 'new_layer'}) - self.assertEqual(list(self.stack.names)[0], 'new_layer') - - # check initiation from array in memory - arr = np.zeros((100, 100)) - self.stack = Raster(arr, in_memory=True) - layer_name = list(self.stack.names)[0] - - self.stack = self.stack.rename({layer_name: 'new_layer'}) - self.assertEqual(list(self.stack.names)[0], 'new_layer') +from unittest import TestCase + +import os +import rasterio +import numpy as np + +from pyspatialml import Raster, RasterLayer +from pyspatialml.datasets import nc + + +class TestInit(TestCase): + def setUp(self) -> None: + # inputs + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, + nc.band7] + + # test results + self.stack = None + + def tearDown(self) -> None: + self.stack.close() + + def test_initiation_files(self): + # test init from list of file paths + self.stack = Raster(self.predictors) + self.assertIsInstance(self.stack, Raster) + self.assertEqual(self.stack.count, 6) + + def test_initiation_file(self): + # test init from single file path + self.stack = Raster(nc.band1) + self.assertIsInstance(self.stack, Raster) + self.assertEqual(self.stack.count, 1) + + def test_initiation_datasetreader(self): + # test init from single rasterio.io.datasetreader + with rasterio.open(nc.band1) as src: + self.stack = Raster(src) + self.assertIsInstance(self.stack, Raster) + self.assertEqual(self.stack.count, 1) + + def test_initiation_list_datasetreader(self): + 
# test init from list of rasterio.io.datasetreader objects + srcs = [] + for f in self.predictors: + srcs.append(rasterio.open(f)) + self.stack = Raster(srcs) + self.assertIsInstance(self.stack, Raster) + self.assertEqual(self.stack.count, 6) + + def test_initiation_band(self): + # test init from single rasterio.band object + with rasterio.open(nc.band1) as src: + band = rasterio.band(src, 1) + self.stack = Raster(band) + self.assertIsInstance(self.stack, Raster) + self.assertEqual(self.stack.count, 1) + + def test_initiation_list_bands(self): + # test init from list of rasterio.band objects + bands = [] + for f in self.predictors: + src = rasterio.open(f) + bands.append(rasterio.band(src, 1)) + self.stack = Raster(bands) + self.assertIsInstance(self.stack, Raster) + self.assertEqual(self.stack.count, 6) + + def test_initiation_rasterlayer(self): + # test init from a single RasterLayer object + with rasterio.open(nc.band1) as src: + band = rasterio.band(src, 1) + layer = RasterLayer(band) + self.stack = Raster(layer) + self.assertIsInstance(self.stack, Raster) + self.assertEqual(self.stack.count, 1) + + def test_initiation_list_rasterlayer(self): + # test init from a list of RasterLayer objects + layers = [] + for f in self.predictors: + src = rasterio.open(f) + band = rasterio.band(src, 1) + layers.append(RasterLayer(band)) + self.stack = Raster(layers) + self.assertIsInstance(self.stack, Raster) + self.assertEqual(self.stack.count, 6) + + def test_initiation_array(self): + # check initiation of single-band raster from file + arr = np.zeros((100, 100)) + self.stack = Raster(arr) + + # check output is written to tempfile + self.assertTrue(os.path.exists(self.stack.iloc[0].file)) + + # check some operations on the created raster + layer_name = list(self.stack.names)[0] + self.stack = self.stack.rename({layer_name: 'new_layer'}) + self.assertEqual(list(self.stack.names)[0], 'new_layer') + + # check initiation from array in memory + arr = np.zeros((100, 100)) + 
self.stack = Raster(arr, in_memory=True) + layer_name = list(self.stack.names)[0] + + self.stack = self.stack.rename({layer_name: 'new_layer'}) + self.assertEqual(list(self.stack.names)[0], 'new_layer') diff --git a/tests/test_intersect.py b/tests/test_intersect.py index eb7a60d..13038bc 100644 --- a/tests/test_intersect.py +++ b/tests/test_intersect.py @@ -1,63 +1,63 @@ -from unittest import TestCase - -import numpy as np - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestIntersect(TestCase): - def setUp(self) -> None: - # inputs - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, - nc.band7] - self.stack = Raster(self.predictors) - - # test results - self.result = None - - def tearDown(self) -> None: - self.stack.close() - self.result.close() - - def test_intersect_defaults(self): - self.result = self.stack.intersect() - - # check raster object - self.assertIsInstance(self.result, Raster) - self.assertEqual(self.result.count, self.stack.count) - self.assertEqual(self.result.read(masked=True).count(), 810552) - - # test nodata value is recognized - self.assertEqual(self.result.read(masked=True).min(), 1.0) - self.assertEqual(self.result.read(masked=True).max(), 255.0) - - def test_intersect_custom_dtype(self): - self.result = self.stack.intersect(dtype=np.int16) - - # check raster object - self.assertIsInstance(self.result, Raster) - self.assertEqual(self.result.count, self.stack.count) - self.assertEqual(self.result.read(masked=True).count(), 810552) - - # test nodata value is recognized - self.assertEqual(self.result.read(masked=True).min(), 1) - self.assertEqual(self.result.read(masked=True).max(), 255) - - def test_intersect_custom_nodata(self): - self.result = self.stack.intersect(dtype=np.int16, nodata=-999) - - # check raster object - self.assertIsInstance(self.result, Raster) - self.assertEqual(self.result.count, self.stack.count) - self.assertEqual(self.result.read(masked=True).count(), 810552) - - # 
test nodata value is recognized - self.assertEqual(self.result.read(masked=True).min(), 1) - self.assertEqual(self.result.read(masked=True).max(), 255) - - def test_intersect_in_memory(self): - self.result = self.stack.intersect(in_memory=True) - - # check raster object - self.assertIsInstance(self.result, Raster) +from unittest import TestCase + +import numpy as np + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestIntersect(TestCase): + def setUp(self) -> None: + # inputs + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, + nc.band7] + self.stack = Raster(self.predictors) + + # test results + self.result = None + + def tearDown(self) -> None: + self.stack.close() + self.result.close() + + def test_intersect_defaults(self): + self.result = self.stack.intersect() + + # check raster object + self.assertIsInstance(self.result, Raster) + self.assertEqual(self.result.count, self.stack.count) + self.assertEqual(self.result.read(masked=True).count(), 810552) + + # test nodata value is recognized + self.assertEqual(self.result.read(masked=True).min(), 1.0) + self.assertEqual(self.result.read(masked=True).max(), 255.0) + + def test_intersect_custom_dtype(self): + self.result = self.stack.intersect(dtype=np.int16) + + # check raster object + self.assertIsInstance(self.result, Raster) + self.assertEqual(self.result.count, self.stack.count) + self.assertEqual(self.result.read(masked=True).count(), 810552) + + # test nodata value is recognized + self.assertEqual(self.result.read(masked=True).min(), 1) + self.assertEqual(self.result.read(masked=True).max(), 255) + + def test_intersect_custom_nodata(self): + self.result = self.stack.intersect(dtype=np.int16, nodata=-999) + + # check raster object + self.assertIsInstance(self.result, Raster) + self.assertEqual(self.result.count, self.stack.count) + self.assertEqual(self.result.read(masked=True).count(), 810552) + + # test nodata value is recognized + 
self.assertEqual(self.result.read(masked=True).min(), 1) + self.assertEqual(self.result.read(masked=True).max(), 255) + + def test_intersect_in_memory(self): + self.result = self.stack.intersect(in_memory=True) + + # check raster object + self.assertIsInstance(self.result, Raster) diff --git a/tests/test_mask.py b/tests/test_mask.py index d0770e3..b8e4c6b 100644 --- a/tests/test_mask.py +++ b/tests/test_mask.py @@ -1,79 +1,79 @@ -from unittest import TestCase - -import geopandas as gpd -import numpy as np - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestMask(TestCase): - def setUp(self) -> None: - # test inputs - training_py = gpd.read_file(nc.polygons) - self.mask_py = training_py.iloc[0:1, :] - - predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, - nc.band7] - self.stack = Raster(predictors) - - # test results - self.masked_object = None - - def tearDown(self) -> None: - self.stack.close() - self.masked_object.close() - - def test_mask_defaults(self): - self.masked_object = self.stack.mask(self.mask_py) - - # check raster object - self.assertIsInstance(self.masked_object, Raster) - self.assertEqual(self.masked_object.count, self.stack.count) - self.assertEqual(self.masked_object.read(masked=True).count(), 738) - - # test nodata value is recognized - self.assertEqual(self.masked_object.read(masked=True).min(), 38.0) - self.assertEqual(self.masked_object.read(masked=True).max(), 168.0) - - def test_mask_inverted(self): - self.masked_object = self.stack.mask(self.mask_py, invert=True) - - # check raster object - self.assertIsInstance(self.masked_object, Raster) - self.assertEqual(self.masked_object.count, self.stack.count) - self.assertEqual(self.masked_object.read(masked=True).count(), 1051444) - - # test nodata value is recognized - self.assertEqual(self.masked_object.read(masked=True).min(), 1.0) - self.assertEqual(self.masked_object.read(masked=True).max(), 255.0) - - def test_mask_custom_dtype(self): - 
self.masked_object = self.stack.mask(self.mask_py, dtype=np.int16) - - # check raster object - self.assertIsInstance(self.masked_object, Raster) - self.assertEqual(self.masked_object.count, self.stack.count) - self.assertEqual(self.masked_object.read(masked=True).count(), 738) - - # test nodata value is recognized - self.assertEqual(self.masked_object.read(masked=True).min(), 38) - self.assertEqual(self.masked_object.read(masked=True).max(), 168) - - def test_mask_custom_nodata(self): - self.masked_object = self.stack.mask(self.mask_py, nodata=-99999) - - # check raster object - self.assertIsInstance(self.masked_object, Raster) - self.assertEqual(self.masked_object.count, self.stack.count) - self.assertEqual(self.masked_object.read(masked=True).count(), 738) - - # test nodata value is recognized - self.assertEqual(self.masked_object.read(masked=True).min(), 38.0) - self.assertEqual(self.masked_object.read(masked=True).max(), 168.0) - - def test_mask_in_memory(self): - self.masked_object = self.stack.mask(self.mask_py, in_memory=True) - - # check raster object - self.assertIsInstance(self.masked_object, Raster) +from unittest import TestCase + +import geopandas as gpd +import numpy as np + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestMask(TestCase): + def setUp(self) -> None: + # test inputs + training_py = gpd.read_file(nc.polygons) + self.mask_py = training_py.iloc[0:1, :] + + predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, + nc.band7] + self.stack = Raster(predictors) + + # test results + self.masked_object = None + + def tearDown(self) -> None: + self.stack.close() + self.masked_object.close() + + def test_mask_defaults(self): + self.masked_object = self.stack.mask(self.mask_py) + + # check raster object + self.assertIsInstance(self.masked_object, Raster) + self.assertEqual(self.masked_object.count, self.stack.count) + self.assertEqual(self.masked_object.read(masked=True).count(), 738) + + # test nodata 
value is recognized + self.assertEqual(self.masked_object.read(masked=True).min(), 38.0) + self.assertEqual(self.masked_object.read(masked=True).max(), 168.0) + + def test_mask_inverted(self): + self.masked_object = self.stack.mask(self.mask_py, invert=True) + + # check raster object + self.assertIsInstance(self.masked_object, Raster) + self.assertEqual(self.masked_object.count, self.stack.count) + self.assertEqual(self.masked_object.read(masked=True).count(), 1051444) + + # test nodata value is recognized + self.assertEqual(self.masked_object.read(masked=True).min(), 1.0) + self.assertEqual(self.masked_object.read(masked=True).max(), 255.0) + + def test_mask_custom_dtype(self): + self.masked_object = self.stack.mask(self.mask_py, dtype=np.int16) + + # check raster object + self.assertIsInstance(self.masked_object, Raster) + self.assertEqual(self.masked_object.count, self.stack.count) + self.assertEqual(self.masked_object.read(masked=True).count(), 738) + + # test nodata value is recognized + self.assertEqual(self.masked_object.read(masked=True).min(), 38) + self.assertEqual(self.masked_object.read(masked=True).max(), 168) + + def test_mask_custom_nodata(self): + self.masked_object = self.stack.mask(self.mask_py, nodata=-99999) + + # check raster object + self.assertIsInstance(self.masked_object, Raster) + self.assertEqual(self.masked_object.count, self.stack.count) + self.assertEqual(self.masked_object.read(masked=True).count(), 738) + + # test nodata value is recognized + self.assertEqual(self.masked_object.read(masked=True).min(), 38.0) + self.assertEqual(self.masked_object.read(masked=True).max(), 168.0) + + def test_mask_in_memory(self): + self.masked_object = self.stack.mask(self.mask_py, in_memory=True) + + # check raster object + self.assertIsInstance(self.masked_object, Raster) diff --git a/tests/test_plotting.py b/tests/test_plotting.py index 93c9aa3..7f301f4 100644 --- a/tests/test_plotting.py +++ b/tests/test_plotting.py @@ -1,41 +1,41 @@ -from unittest 
import TestCase - -import matplotlib as mpl -import numpy as np - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestPlotting(TestCase): - def setUp(self) -> None: - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] - self.stack = Raster(self.predictors) - self.stack_single = Raster(self.predictors[0]) - - def tearDown(self) -> None: - self.stack.close() - self.stack_single.close() - - def test_plotting_raster(self): - - # test basic raster matrix plot - p = self.stack.plot() - self.assertIsInstance(p, np.ndarray) - - # test with arguments - p = self.stack.plot( - cmap="plasma", - norm=mpl.colors.Normalize(vmin=10, vmax=100), - title_fontsize=10, - label_fontsize=10, - names=["band1", "band2", "band3", "band4", "band5", "band7"], - figsize=(10, 5), - legend_kwds={"orientation": "horizontal"} - ) - self.assertIsInstance(p, np.ndarray) - - def test_plotting_single(self): - p = self.stack_single.plot( - legend_kwds={"orientation": "horizontal", "fraction": 0.04}) - self.assertIsInstance(p, mpl.axes.Subplot) +from unittest import TestCase + +import matplotlib as mpl +import numpy as np + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestPlotting(TestCase): + def setUp(self) -> None: + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] + self.stack = Raster(self.predictors) + self.stack_single = Raster(self.predictors[0]) + + def tearDown(self) -> None: + self.stack.close() + self.stack_single.close() + + def test_plotting_raster(self): + + # test basic raster matrix plot + p = self.stack.plot() + self.assertIsInstance(p, np.ndarray) + + # test with arguments + p = self.stack.plot( + cmap="plasma", + norm=mpl.colors.Normalize(vmin=10, vmax=100), + title_fontsize=10, + label_fontsize=10, + names=["band1", "band2", "band3", "band4", "band5", "band7"], + figsize=(10, 5), + legend_kwds={"orientation": "horizontal"} + ) + self.assertIsInstance(p, 
np.ndarray) + + def test_plotting_single(self): + p = self.stack_single.plot( + legend_kwds={"orientation": "horizontal", "fraction": 0.04}) + self.assertIsInstance(p, mpl.axes.Subplot) diff --git a/tests/test_prediction.py b/tests/test_prediction.py index d7d03ea..9c8bebf 100644 --- a/tests/test_prediction.py +++ b/tests/test_prediction.py @@ -1,215 +1,215 @@ -from unittest import TestCase - -import geopandas as gpd -from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor - -import pyspatialml.datasets.meuse as ms -from pyspatialml import Raster -from pyspatialml.datasets import nc - - -class TestPrediction(TestCase): - nc_predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] - stack_nc = Raster(nc_predictors) - stack_meuse = Raster(ms.predictors) - - def test_classification(self): - training_pt = gpd.read_file(nc.points) - df_points = self.stack_nc.extract_vector(gdf=training_pt) - df_points["class_id"] = training_pt["id"].values - df_points = df_points.dropna() - - clf = RandomForestClassifier(n_estimators=50) - X = df_points.drop(columns=["class_id", "geometry"]).values - y = df_points.class_id.values - clf.fit(X, y) - - # classification - cla = self.stack_nc.predict(estimator=clf, dtype="int16", nodata=0) - self.assertIsInstance(cla, Raster) - self.assertEqual(cla.count, 1) - self.assertEqual(cla.read(masked=True).count(), 135092) - - # class probabilities - probs = self.stack_nc.predict_proba(estimator=clf) - self.assertIsInstance(cla, Raster) - self.assertEqual(probs.count, 7) - - for layer in probs.values(): - self.assertEqual(layer.read(masked=True).count(), 135092) - - def test_classification_in_memory(self): - training_pt = gpd.read_file(nc.points) - df_points = self.stack_nc.extract_vector(gdf=training_pt) - df_points["class_id"] = training_pt["id"].values - df_points = df_points.dropna() - - clf = RandomForestClassifier(n_estimators=50) - X = df_points.drop(columns=["class_id", "geometry"]).values - y = 
df_points.class_id.values - clf.fit(X, y) - - # classification - cla = self.stack_nc.predict(estimator=clf, dtype="int16", nodata=0, - in_memory=True) - self.assertIsInstance(cla, Raster) - self.assertEqual(cla.count, 1) - self.assertEqual(cla.read(masked=True).count(), 135092) - cla.close() - - # class probabilities - probs = self.stack_nc.predict_proba(estimator=clf, in_memory=True) - self.assertIsInstance(cla, Raster) - self.assertEqual(probs.count, 7) - - for layer in probs.values(): - self.assertEqual(layer.read(masked=True).count(), 135092) - - probs.close() - - def test_regression(self): - training_pt = gpd.read_file(ms.meuse) - training = self.stack_meuse.extract_vector(gdf=training_pt) - training["zinc"] = training_pt["zinc"].values - training["cadmium"] = training_pt["cadmium"].values - training["copper"] = training_pt["copper"].values - training["lead"] = training_pt["lead"].values - training = training.dropna() - - # single target regression - regr = RandomForestRegressor(n_estimators=50) - X = training.loc[:, self.stack_meuse.names].values - y = training["zinc"].values - regr.fit(X, y) - - single_regr = self.stack_meuse.predict(regr) - self.assertIsInstance(single_regr, Raster) - self.assertEqual(single_regr.count, 1) - single_regr.close() - - # multi-target regression - y = training.loc[:, ["zinc", "cadmium", "copper", "lead"]] - regr.fit(X, y) - multi_regr = self.stack_meuse.predict(regr) - self.assertIsInstance(multi_regr, Raster) - self.assertEqual(multi_regr.count, 4) - multi_regr.close() - - def test_regression_in_memory(self): - training_pt = gpd.read_file(ms.meuse) - training = self.stack_meuse.extract_vector(gdf=training_pt) - training["zinc"] = training_pt["zinc"].values - training["cadmium"] = training_pt["cadmium"].values - training["copper"] = training_pt["copper"].values - training["lead"] = training_pt["lead"].values - training = training.dropna() - - # single target regression - regr = RandomForestRegressor(n_estimators=50) - X = 
training.loc[:, self.stack_meuse.names].values - y = training["zinc"].values - regr.fit(X, y) - - single_regr = self.stack_meuse.predict(regr, in_memory=True) - self.assertIsInstance(single_regr, Raster) - self.assertEqual(single_regr.count, 1) - single_regr.close() - - # multi-target regression - y = training.loc[:, ["zinc", "cadmium", "copper", "lead"]] - regr.fit(X, y) - multi_regr = self.stack_meuse.predict(regr, in_memory=True) - self.assertIsInstance(multi_regr, Raster) - self.assertEqual(multi_regr.count, 4) - multi_regr.close() - - def test_classification_with_single_constant(self): - training_pt = gpd.read_file(nc.points) - df_points = self.stack_nc.extract_vector(gdf=training_pt) - df_points["class_id"] = training_pt["id"].values - df_points = df_points.dropna() - - # classification with a single constant - df_points["constant"] = 1 - - clf = RandomForestClassifier(n_estimators=50) - X = df_points.drop(columns=["class_id", "geometry"]).values - y = df_points.class_id.values - clf.fit(X, y) - - cla = self.stack_nc.predict( - estimator=clf, - dtype="int16", - nodata=0, - constants=[1] - ) - self.assertIsInstance(cla, Raster) - self.assertEqual(cla.count, 1) - self.assertEqual(cla.read(masked=True).count(), 135092) - - probs = self.stack_nc.predict_proba(estimator=clf, constants=[1]) - self.assertIsInstance(cla, Raster) - self.assertEqual(probs.count, 7) - - for layer in probs.values(): - self.assertEqual(layer.read(masked=True).count(), 135092) - - def test_classification_with_list_constants(self): - training_pt = gpd.read_file(nc.points) - df_points = self.stack_nc.extract_vector(gdf=training_pt) - df_points["class_id"] = training_pt["id"].values - df_points = df_points.dropna() - - df_points["constant"] = 1 - df_points["constant2"] = 2 - - clf = RandomForestClassifier(n_estimators=50) - X = df_points.drop(columns=["class_id", "geometry"]).values - y = df_points.class_id.values - clf.fit(X, y) - - cla = self.stack_nc.predict( - estimator=clf, - 
dtype="int16", - nodata=0, - constants=[1, 2] - ) - self.assertIsInstance(cla, Raster) - self.assertEqual(cla.count, 1) - self.assertEqual(cla.read(masked=True).count(), 135092) - - probs = self.stack_nc.predict_proba(estimator=clf, constants=[1, 2]) - self.assertIsInstance(cla, Raster) - self.assertEqual(probs.count, 7) - - for layer in probs.values(): - self.assertEqual(layer.read(masked=True).count(), 135092) - - def test_classification_with_dict_constants(self): - # classification using constant to replace an existing layer - training_pt = gpd.read_file(nc.points) - df_points = self.stack_nc.extract_vector(gdf=training_pt) - df_points["class_id"] = training_pt["id"].values - df_points = df_points.dropna() - - clf = RandomForestClassifier(n_estimators=50) - X = df_points.drop(columns=["class_id", "geometry"]).values - y = df_points.class_id.values - clf.fit(X, y) - - cla = self.stack_nc.predict( - estimator=clf, - dtype="int16", - nodata=0, - constants={"lsat7_2000_10": 150} - ) - self.assertIsInstance(cla, Raster) - self.assertEqual(cla.count, 1) - self.assertEqual(cla.read(masked=True).count(), 135092) - - probs = self.stack_nc.predict_proba(estimator=clf, constants={"lsat7_2000_10": 150}) - self.assertIsInstance(cla, Raster) - self.assertEqual(probs.count, 7) - - for layer in probs.values(): - self.assertEqual(layer.read(masked=True).count(), 135092) +from unittest import TestCase + +import geopandas as gpd +from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor + +import pyspatialml.datasets.meuse as ms +from pyspatialml import Raster +from pyspatialml.datasets import nc + + +class TestPrediction(TestCase): + nc_predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] + stack_nc = Raster(nc_predictors) + stack_meuse = Raster(ms.predictors) + + def test_classification(self): + training_pt = gpd.read_file(nc.points) + df_points = self.stack_nc.extract_vector(gdf=training_pt) + df_points["class_id"] = 
training_pt["id"].values + df_points = df_points.dropna() + + clf = RandomForestClassifier(n_estimators=50) + X = df_points.drop(columns=["class_id", "geometry"]).values + y = df_points.class_id.values + clf.fit(X, y) + + # classification + cla = self.stack_nc.predict(estimator=clf, dtype="int16", nodata=0) + self.assertIsInstance(cla, Raster) + self.assertEqual(cla.count, 1) + self.assertEqual(cla.read(masked=True).count(), 135092) + + # class probabilities + probs = self.stack_nc.predict_proba(estimator=clf) + self.assertIsInstance(cla, Raster) + self.assertEqual(probs.count, 7) + + for layer in probs.values(): + self.assertEqual(layer.read(masked=True).count(), 135092) + + def test_classification_in_memory(self): + training_pt = gpd.read_file(nc.points) + df_points = self.stack_nc.extract_vector(gdf=training_pt) + df_points["class_id"] = training_pt["id"].values + df_points = df_points.dropna() + + clf = RandomForestClassifier(n_estimators=50) + X = df_points.drop(columns=["class_id", "geometry"]).values + y = df_points.class_id.values + clf.fit(X, y) + + # classification + cla = self.stack_nc.predict(estimator=clf, dtype="int16", nodata=0, + in_memory=True) + self.assertIsInstance(cla, Raster) + self.assertEqual(cla.count, 1) + self.assertEqual(cla.read(masked=True).count(), 135092) + cla.close() + + # class probabilities + probs = self.stack_nc.predict_proba(estimator=clf, in_memory=True) + self.assertIsInstance(cla, Raster) + self.assertEqual(probs.count, 7) + + for layer in probs.values(): + self.assertEqual(layer.read(masked=True).count(), 135092) + + probs.close() + + def test_regression(self): + training_pt = gpd.read_file(ms.meuse) + training = self.stack_meuse.extract_vector(gdf=training_pt) + training["zinc"] = training_pt["zinc"].values + training["cadmium"] = training_pt["cadmium"].values + training["copper"] = training_pt["copper"].values + training["lead"] = training_pt["lead"].values + training = training.dropna() + + # single target regression + 
regr = RandomForestRegressor(n_estimators=50) + X = training.loc[:, self.stack_meuse.names].values + y = training["zinc"].values + regr.fit(X, y) + + single_regr = self.stack_meuse.predict(regr) + self.assertIsInstance(single_regr, Raster) + self.assertEqual(single_regr.count, 1) + single_regr.close() + + # multi-target regression + y = training.loc[:, ["zinc", "cadmium", "copper", "lead"]] + regr.fit(X, y) + multi_regr = self.stack_meuse.predict(regr) + self.assertIsInstance(multi_regr, Raster) + self.assertEqual(multi_regr.count, 4) + multi_regr.close() + + def test_regression_in_memory(self): + training_pt = gpd.read_file(ms.meuse) + training = self.stack_meuse.extract_vector(gdf=training_pt) + training["zinc"] = training_pt["zinc"].values + training["cadmium"] = training_pt["cadmium"].values + training["copper"] = training_pt["copper"].values + training["lead"] = training_pt["lead"].values + training = training.dropna() + + # single target regression + regr = RandomForestRegressor(n_estimators=50) + X = training.loc[:, self.stack_meuse.names].values + y = training["zinc"].values + regr.fit(X, y) + + single_regr = self.stack_meuse.predict(regr, in_memory=True) + self.assertIsInstance(single_regr, Raster) + self.assertEqual(single_regr.count, 1) + single_regr.close() + + # multi-target regression + y = training.loc[:, ["zinc", "cadmium", "copper", "lead"]] + regr.fit(X, y) + multi_regr = self.stack_meuse.predict(regr, in_memory=True) + self.assertIsInstance(multi_regr, Raster) + self.assertEqual(multi_regr.count, 4) + multi_regr.close() + + def test_classification_with_single_constant(self): + training_pt = gpd.read_file(nc.points) + df_points = self.stack_nc.extract_vector(gdf=training_pt) + df_points["class_id"] = training_pt["id"].values + df_points = df_points.dropna() + + # classification with a single constant + df_points["constant"] = 1 + + clf = RandomForestClassifier(n_estimators=50) + X = df_points.drop(columns=["class_id", "geometry"]).values + y = 
df_points.class_id.values + clf.fit(X, y) + + cla = self.stack_nc.predict( + estimator=clf, + dtype="int16", + nodata=0, + constants=[1] + ) + self.assertIsInstance(cla, Raster) + self.assertEqual(cla.count, 1) + self.assertEqual(cla.read(masked=True).count(), 135092) + + probs = self.stack_nc.predict_proba(estimator=clf, constants=[1]) + self.assertIsInstance(cla, Raster) + self.assertEqual(probs.count, 7) + + for layer in probs.values(): + self.assertEqual(layer.read(masked=True).count(), 135092) + + def test_classification_with_list_constants(self): + training_pt = gpd.read_file(nc.points) + df_points = self.stack_nc.extract_vector(gdf=training_pt) + df_points["class_id"] = training_pt["id"].values + df_points = df_points.dropna() + + df_points["constant"] = 1 + df_points["constant2"] = 2 + + clf = RandomForestClassifier(n_estimators=50) + X = df_points.drop(columns=["class_id", "geometry"]).values + y = df_points.class_id.values + clf.fit(X, y) + + cla = self.stack_nc.predict( + estimator=clf, + dtype="int16", + nodata=0, + constants=[1, 2] + ) + self.assertIsInstance(cla, Raster) + self.assertEqual(cla.count, 1) + self.assertEqual(cla.read(masked=True).count(), 135092) + + probs = self.stack_nc.predict_proba(estimator=clf, constants=[1, 2]) + self.assertIsInstance(cla, Raster) + self.assertEqual(probs.count, 7) + + for layer in probs.values(): + self.assertEqual(layer.read(masked=True).count(), 135092) + + def test_classification_with_dict_constants(self): + # classification using constant to replace an existing layer + training_pt = gpd.read_file(nc.points) + df_points = self.stack_nc.extract_vector(gdf=training_pt) + df_points["class_id"] = training_pt["id"].values + df_points = df_points.dropna() + + clf = RandomForestClassifier(n_estimators=50) + X = df_points.drop(columns=["class_id", "geometry"]).values + y = df_points.class_id.values + clf.fit(X, y) + + cla = self.stack_nc.predict( + estimator=clf, + dtype="int16", + nodata=0, + 
constants={"lsat7_2000_10": 150} + ) + self.assertIsInstance(cla, Raster) + self.assertEqual(cla.count, 1) + self.assertEqual(cla.read(masked=True).count(), 135092) + + probs = self.stack_nc.predict_proba(estimator=clf, constants={"lsat7_2000_10": 150}) + self.assertIsInstance(cla, Raster) + self.assertEqual(probs.count, 7) + + for layer in probs.values(): + self.assertEqual(layer.read(masked=True).count(), 135092) diff --git a/tests/test_rename.py b/tests/test_rename.py index fdd8072..70d3b17 100644 --- a/tests/test_rename.py +++ b/tests/test_rename.py @@ -1,106 +1,106 @@ -from unittest import TestCase - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster -import rasterio -import numpy as np -import os -from tempfile import NamedTemporaryFile - - -class TestRename(TestCase): - def setUp(self) -> None: - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, - nc.band7] - - def test_rename_inplace(self): - stack = Raster(self.predictors) - band3_stats = stack.lsat7_2000_30.mean() - - # rename band 3 - stack.rename(names={"lsat7_2000_30": "new_name"}, in_place=True) - - # check that renaming occurred in Raster - self.assertEqual(list(stack.names)[2], "new_name") - self.assertNotIn("lsat7_2000_30", stack.names) - - # check that Raster layer properties also renamed - self.assertIn("new_name", dir(stack)) - self.assertNotIn("lsat7_2000_30", dir(stack)) - - # check that internal name of RasterLayer was also renamed - self.assertEqual(stack.iloc[2].name, "new_name") - - # check that the RasterLayer attached to the new name is the same - self.assertEqual(stack["new_name"].mean(), band3_stats) - self.assertEqual(stack.new_name.mean(), band3_stats) - self.assertEqual(stack.iloc[2].mean(), band3_stats) - - # check that a new Raster object derived from the renamed data - # have the right names - new_raster = Raster(src=stack.iloc[2]) - self.assertIn("new_name", new_raster.names) - - def test_rename_with_copy(self): - stack = 
Raster(self.predictors) - names = list(stack.names) - band3_stats = stack.lsat7_2000_30.mean() - - # rename band 3 - result = stack.rename(names={"lsat7_2000_30": "new_name"}, - in_place=False) - - # check that original is untouched - self.assertEqual(list(stack.names), names) - - # check that renaming occurred in Raster - self.assertEqual(list(result.names)[2], "new_name") - self.assertNotIn("lsat7_2000_30", result.names) - - # check that Raster layer properties also renamed - self.assertIn("new_name", dir(result)) - self.assertNotIn("lsat7_2000_30", dir(result)) - - # check that internal name of RasterLayer was also renamed - self.assertEqual(result.iloc[2].name, "new_name") - - # check that the RasterLayer attached to the new name is the same - self.assertEqual(result["new_name"].mean(), band3_stats) - self.assertEqual(result["new_name"].mean(), band3_stats) - self.assertEqual(result.new_name.mean(), band3_stats) - self.assertEqual(result.iloc[2].mean(), band3_stats) - - # check that a new Raster object derived from the renamed data - # have the right names - new_raster = Raster(src=result.iloc[2]) - self.assertIn("new_name", new_raster.names) - - def rename_multiband(self): - # Create a fake 3-band image for testing - arr = np.random.rand(3, 64, 64) - file = NamedTemporaryFile(prefix="test", suffix=".tif").name - layer_name = os.path.basename(file).split(".")[0] - layer_names = ["_".join([layer_name, str(i)]) for i in [1, 2 ,3]] - - with rasterio.open(file, "w", width=64, height=64, count=3, dtype=np.float32) as dst: - dst.write(arr) - - r = Raster(file) - self.assertListEqual(list(r.names), layer_names) - - renamed = r.rename(dict(zip(r.names, ["Red", "Green", "Blue"]))) - self.assertListEqual(list(renamed.names), ["Red", "Green", "Blue"]) - - def rename_in_memory(self): - # Create a fake 3-band image for testing - arr = np.random.rand(3, 64, 64) - file = NamedTemporaryFile(prefix="test", suffix=".tif").name - - with rasterio.open(file, "w", width=64, 
height=64, count=3, dtype=np.float32) as dst: - dst.write(arr) - - r = Raster(file) - in_memory = r.aggregate((32, 32), in_memory=True) - - renamed = r.rename(dict(zip(in_memory.names, ["Red", "Green", "Blue"]))) - self.assertListEqual(list(renamed.names), ["Red", "Green", "Blue"]) +from unittest import TestCase + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster +import rasterio +import numpy as np +import os +from tempfile import NamedTemporaryFile + + +class TestRename(TestCase): + def setUp(self) -> None: + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, + nc.band7] + + def test_rename_inplace(self): + stack = Raster(self.predictors) + band3_stats = stack.lsat7_2000_30.mean() + + # rename band 3 + stack.rename(names={"lsat7_2000_30": "new_name"}, in_place=True) + + # check that renaming occurred in Raster + self.assertEqual(list(stack.names)[2], "new_name") + self.assertNotIn("lsat7_2000_30", stack.names) + + # check that Raster layer properties also renamed + self.assertIn("new_name", dir(stack)) + self.assertNotIn("lsat7_2000_30", dir(stack)) + + # check that internal name of RasterLayer was also renamed + self.assertEqual(stack.iloc[2].name, "new_name") + + # check that the RasterLayer attached to the new name is the same + self.assertEqual(stack["new_name"].mean(), band3_stats) + self.assertEqual(stack.new_name.mean(), band3_stats) + self.assertEqual(stack.iloc[2].mean(), band3_stats) + + # check that a new Raster object derived from the renamed data + # have the right names + new_raster = Raster(src=stack.iloc[2]) + self.assertIn("new_name", new_raster.names) + + def test_rename_with_copy(self): + stack = Raster(self.predictors) + names = list(stack.names) + band3_stats = stack.lsat7_2000_30.mean() + + # rename band 3 + result = stack.rename(names={"lsat7_2000_30": "new_name"}, + in_place=False) + + # check that original is untouched + self.assertEqual(list(stack.names), names) + + # check that renaming occurred in 
Raster + self.assertEqual(list(result.names)[2], "new_name") + self.assertNotIn("lsat7_2000_30", result.names) + + # check that Raster layer properties also renamed + self.assertIn("new_name", dir(result)) + self.assertNotIn("lsat7_2000_30", dir(result)) + + # check that internal name of RasterLayer was also renamed + self.assertEqual(result.iloc[2].name, "new_name") + + # check that the RasterLayer attached to the new name is the same + self.assertEqual(result["new_name"].mean(), band3_stats) + self.assertEqual(result["new_name"].mean(), band3_stats) + self.assertEqual(result.new_name.mean(), band3_stats) + self.assertEqual(result.iloc[2].mean(), band3_stats) + + # check that a new Raster object derived from the renamed data + # have the right names + new_raster = Raster(src=result.iloc[2]) + self.assertIn("new_name", new_raster.names) + + def rename_multiband(self): + # Create a fake 3-band image for testing + arr = np.random.rand(3, 64, 64) + file = NamedTemporaryFile(prefix="test", suffix=".tif").name + layer_name = os.path.basename(file).split(".")[0] + layer_names = ["_".join([layer_name, str(i)]) for i in [1, 2 ,3]] + + with rasterio.open(file, "w", width=64, height=64, count=3, dtype=np.float32) as dst: + dst.write(arr) + + r = Raster(file) + self.assertListEqual(list(r.names), layer_names) + + renamed = r.rename(dict(zip(r.names, ["Red", "Green", "Blue"]))) + self.assertListEqual(list(renamed.names), ["Red", "Green", "Blue"]) + + def rename_in_memory(self): + # Create a fake 3-band image for testing + arr = np.random.rand(3, 64, 64) + file = NamedTemporaryFile(prefix="test", suffix=".tif").name + + with rasterio.open(file, "w", width=64, height=64, count=3, dtype=np.float32) as dst: + dst.write(arr) + + r = Raster(file) + in_memory = r.aggregate((32, 32), in_memory=True) + + renamed = r.rename(dict(zip(in_memory.names, ["Red", "Green", "Blue"]))) + self.assertListEqual(list(renamed.names), ["Red", "Green", "Blue"]) diff --git a/tests/test_sample.py 
b/tests/test_sample.py index 713d5fa..d725361 100644 --- a/tests/test_sample.py +++ b/tests/test_sample.py @@ -1,42 +1,42 @@ -from unittest import TestCase - -import numpy as np - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestSample(TestCase): - def setUp(self) -> None: - predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] - self.stack = Raster(predictors) - self.strata = Raster(nc.strata) - - def tearDown(self) -> None: - self.stack.close() - self.strata.close() - - def test_sample_strata(self): - # extract using a strata raster and returning two arrays - size = 100 - categories = self.strata.read(masked=True).flatten() - categories = categories[~categories.mask] - n_categories = np.unique(categories).shape[0] - n_samples = size * n_categories - - X, xy = self.stack.sample(size=size, strata=self.strata, return_array=True) - self.assertEqual(X.shape, (n_samples, 6)) - self.assertEqual(xy.shape, (n_samples, 2)) - - # extract using a strata raster and returning a dataframe - samples = self.stack.sample(size=size, strata=self.strata, return_array=False) - self.assertEqual(samples.shape, (n_samples, 7)) - - def test_sample_no_strata(self): - size = 100 - X, xy = self.stack.sample(size=size, return_array=True) - self.assertEqual(X.shape, (size, 6)) - self.assertEqual(xy.shape, (size, 2)) - - samples = self.stack.sample(size=size, return_array=False) +from unittest import TestCase + +import numpy as np + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestSample(TestCase): + def setUp(self) -> None: + predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] + self.stack = Raster(predictors) + self.strata = Raster(nc.strata) + + def tearDown(self) -> None: + self.stack.close() + self.strata.close() + + def test_sample_strata(self): + # extract using a strata raster and returning two arrays + size = 100 + categories = self.strata.read(masked=True).flatten() + 
categories = categories[~categories.mask] + n_categories = np.unique(categories).shape[0] + n_samples = size * n_categories + + X, xy = self.stack.sample(size=size, strata=self.strata, return_array=True) + self.assertEqual(X.shape, (n_samples, 6)) + self.assertEqual(xy.shape, (n_samples, 2)) + + # extract using a strata raster and returning a dataframe + samples = self.stack.sample(size=size, strata=self.strata, return_array=False) + self.assertEqual(samples.shape, (n_samples, 7)) + + def test_sample_no_strata(self): + size = 100 + X, xy = self.stack.sample(size=size, return_array=True) + self.assertEqual(X.shape, (size, 6)) + self.assertEqual(xy.shape, (size, 2)) + + samples = self.stack.sample(size=size, return_array=False) self.assertEqual(samples.shape, (size, 7)) \ No newline at end of file diff --git a/tests/test_stats.py b/tests/test_stats.py index ce86a6f..6de9f74 100644 --- a/tests/test_stats.py +++ b/tests/test_stats.py @@ -1,33 +1,33 @@ -import numpy as np -import unittest -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestStats(unittest.TestCase): - def setUp(self) -> None: - predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] - self.predictors = predictors - self.stack = Raster(predictors) - - def test_rasterstats(self): - self.assertEqual(len(self.stack.min()), len(self.predictors)) - self.assertTrue(~np.isnan(self.stack.min()).all()) - - self.assertEqual(len(self.stack.max()), len(self.predictors)) - self.assertTrue(~np.isnan(self.stack.max()).all()) - - self.assertEqual(len(self.stack.mean()), len(self.predictors)) - self.assertTrue(~np.isnan(self.stack.mean()).all()) - - self.assertEqual(len(self.stack.median()), len(self.predictors)) - self.assertTrue(~np.isnan(self.stack.median()).all()) - - def test_layerstats(self): - self.assertEqual(self.stack.iloc[0].min(), 56.0) - self.assertEqual(self.stack.iloc[0].max(), 255.0) - self.assertAlmostEqual(self.stack.iloc[0].mean(), 80.6, places=0) - 
self.assertEqual(self.stack.iloc[0].median(), 75.0) - -if __name__ == '__main__': - unittest.main() +import numpy as np +import unittest +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestStats(unittest.TestCase): + def setUp(self) -> None: + predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, nc.band7] + self.predictors = predictors + self.stack = Raster(predictors) + + def test_rasterstats(self): + self.assertEqual(len(self.stack.min()), len(self.predictors)) + self.assertTrue(~np.isnan(self.stack.min()).all()) + + self.assertEqual(len(self.stack.max()), len(self.predictors)) + self.assertTrue(~np.isnan(self.stack.max()).all()) + + self.assertEqual(len(self.stack.mean()), len(self.predictors)) + self.assertTrue(~np.isnan(self.stack.mean()).all()) + + self.assertEqual(len(self.stack.median()), len(self.predictors)) + self.assertTrue(~np.isnan(self.stack.median()).all()) + + def test_layerstats(self): + self.assertEqual(self.stack.iloc[0].min(), 56.0) + self.assertEqual(self.stack.iloc[0].max(), 255.0) + self.assertAlmostEqual(self.stack.iloc[0].mean(), 80.6, places=0) + self.assertEqual(self.stack.iloc[0].median(), 75.0) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_tocrs.py b/tests/test_tocrs.py index 3a4b5b0..1f31314 100644 --- a/tests/test_tocrs.py +++ b/tests/test_tocrs.py @@ -1,62 +1,62 @@ -from unittest import TestCase - -import pyspatialml.datasets.nc as nc -from pyspatialml import Raster - - -class TestToCrs(TestCase): - def setUp(self) -> None: - # test inputs - predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, - nc.band7] - self.stack = Raster(predictors) - - # test results - self.stack_prj = None - - def tearDown(self) -> None: - self.stack.close() - self.stack_prj.close() - - def test_to_crs_defaults(self): - self.stack_prj = self.stack.to_crs({"init": "EPSG:4326"}) - - # check raster object - self.assertIsInstance(self.stack_prj, Raster) - 
self.assertEqual(self.stack_prj.count, self.stack.count) - self.assertEqual(self.stack_prj.read(masked=True).count(), 1012061) - - # test nodata value is recognized - self.assertEqual( - self.stack_prj.read(masked=True).min(), - self.stack.read(masked=True).min() - ) - self.assertEqual( - self.stack_prj.read(masked=True).max(), - self.stack.read(masked=True).max() - ) - - def test_to_crs_custom_nodata(self): - self.stack_prj = self.stack.to_crs({"init": "EPSG:4326"}, nodata=-999) - - # check raster object - self.assertIsInstance(self.stack_prj, Raster) - self.assertEqual(self.stack_prj.count, self.stack.count) - self.assertEqual(self.stack_prj.read(masked=True).count(), 1012061) - - # test nodata value is recognized - self.assertEqual( - self.stack_prj.read(masked=True).min(), - self.stack.read(masked=True).min() - ) - self.assertEqual( - self.stack_prj.read(masked=True).max(), - self.stack.read(masked=True).max() - ) - - def test_to_crs_in_memory(self): - self.stack_prj = self.stack.to_crs({"init": "EPSG:4326"}, - in_memory=True) - - # check raster object - self.assertIsInstance(self.stack_prj, Raster) +from unittest import TestCase + +import pyspatialml.datasets.nc as nc +from pyspatialml import Raster + + +class TestToCrs(TestCase): + def setUp(self) -> None: + # test inputs + predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, + nc.band7] + self.stack = Raster(predictors) + + # test results + self.stack_prj = None + + def tearDown(self) -> None: + self.stack.close() + self.stack_prj.close() + + def test_to_crs_defaults(self): + self.stack_prj = self.stack.to_crs({"init": "EPSG:4326"}) + + # check raster object + self.assertIsInstance(self.stack_prj, Raster) + self.assertEqual(self.stack_prj.count, self.stack.count) + self.assertEqual(self.stack_prj.read(masked=True).count(), 1012061) + + # test nodata value is recognized + self.assertEqual( + self.stack_prj.read(masked=True).min(), + self.stack.read(masked=True).min() + ) + self.assertEqual( + 
self.stack_prj.read(masked=True).max(), + self.stack.read(masked=True).max() + ) + + def test_to_crs_custom_nodata(self): + self.stack_prj = self.stack.to_crs({"init": "EPSG:4326"}, nodata=-999) + + # check raster object + self.assertIsInstance(self.stack_prj, Raster) + self.assertEqual(self.stack_prj.count, self.stack.count) + self.assertEqual(self.stack_prj.read(masked=True).count(), 1012061) + + # test nodata value is recognized + self.assertEqual( + self.stack_prj.read(masked=True).min(), + self.stack.read(masked=True).min() + ) + self.assertEqual( + self.stack_prj.read(masked=True).max(), + self.stack.read(masked=True).max() + ) + + def test_to_crs_in_memory(self): + self.stack_prj = self.stack.to_crs({"init": "EPSG:4326"}, + in_memory=True) + + # check raster object + self.assertIsInstance(self.stack_prj, Raster) diff --git a/tests/test_transformers.py b/tests/test_transformers.py index 7c6c250..4e31c39 100644 --- a/tests/test_transformers.py +++ b/tests/test_transformers.py @@ -1,15 +1,15 @@ -from unittest import TestCase -from pyspatialml.transformers import AspectTransformer - -import numpy as np - -class TestTransformers(TestCase): - def test_aspect_transformer(self): - trans = AspectTransformer() - dirs = np.arange(0, 360, 1, dtype=np.float32) - - mag = trans.fit_transform(dirs) - inverse = trans.inverse_transform(mag) - inverse = inverse.round(0) - - self.assertListEqual(dirs.tolist(), inverse.tolist()) +from unittest import TestCase +from pyspatialml.transformers import AspectTransformer + +import numpy as np + +class TestTransformers(TestCase): + def test_aspect_transformer(self): + trans = AspectTransformer() + dirs = np.arange(0, 360, 1, dtype=np.float32) + + mag = trans.fit_transform(dirs) + inverse = trans.inverse_transform(mag) + inverse = inverse.round(0) + + self.assertListEqual(dirs.tolist(), inverse.tolist()) diff --git a/tests/test_write.py b/tests/test_write.py index 1c7a8ed..d745ac7 100644 --- a/tests/test_write.py +++ 
b/tests/test_write.py @@ -1,28 +1,28 @@ -from unittest import TestCase -from tempfile import NamedTemporaryFile - -from pyspatialml import Raster -from pyspatialml.datasets import nc - - -class TestWrite(TestCase): - def setUp(self) -> None: - # inputs - self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, - nc.band7] - - # test results - self.stack = None - - def tearDown(self) -> None: - self.stack.close() - - def test_write(self): - # test writing to file - self.stack = Raster(self.predictors) - fp = NamedTemporaryFile(suffix=".tif").name - - result = self.stack.write(fp) - - self.assertIsInstance(result, Raster) - self.assertEqual(result.count, self.stack.count) +from unittest import TestCase +from tempfile import NamedTemporaryFile + +from pyspatialml import Raster +from pyspatialml.datasets import nc + + +class TestWrite(TestCase): + def setUp(self) -> None: + # inputs + self.predictors = [nc.band1, nc.band2, nc.band3, nc.band4, nc.band5, + nc.band7] + + # test results + self.stack = None + + def tearDown(self) -> None: + self.stack.close() + + def test_write(self): + # test writing to file + self.stack = Raster(self.predictors) + fp = NamedTemporaryFile(suffix=".tif").name + + result = self.stack.write(fp) + + self.assertIsInstance(result, Raster) + self.assertEqual(result.count, self.stack.count)