diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 00000000..bbf79725 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1 @@ +- @azure Rule - Use Azure Best Practices: When generating code for Azure, running terminal commands for Azure, or performing operations related to Azure, invoke your `azure_development-get_best_practices` tool if available. \ No newline at end of file diff --git a/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt new file mode 100644 index 00000000..9d227a0c --- /dev/null +++ b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/METADATA b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/METADATA new file mode 100644 index 00000000..82261f2a --- /dev/null +++ b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/METADATA @@ -0,0 +1,92 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 3.0.2 +Summary: Safely add untrusted strings to HTML/XML markup. +Maintainer-email: Pallets +License: Copyright 2010 Pallets + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://markupsafe.palletsprojects.com/ +Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ +Project-URL: Source, https://github.com/pallets/markupsafe/ +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Classifier: Typing :: Typed +Requires-Python: >=3.9 +Description-Content-Type: text/markdown +License-File: LICENSE.txt + +# MarkupSafe + +MarkupSafe implements a text object that escapes characters so it is +safe to use in HTML and XML. Characters that have special meanings are +replaced so that they display as the actual characters. This mitigates +injection attacks, meaning untrusted user input can safely be displayed +on a page. + + +## Examples + +```pycon +>>> from markupsafe import Markup, escape + +>>> # escape replaces special characters and wraps in Markup +>>> escape("") +Markup('<script>alert(document.cookie);</script>') + +>>> # wrap in Markup to mark text "safe" and prevent escaping +>>> Markup("Hello") +Markup('hello') + +>>> escape(Markup("Hello")) +Markup('hello') + +>>> # Markup is a str subclass +>>> # methods and operators escape their arguments +>>> template = Markup("Hello {name}") +>>> template.format(name='"World"') +Markup('Hello "World"') +``` + +## Donate + +The Pallets organization develops and supports MarkupSafe and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +[please donate today][]. 
+ +[please donate today]: https://palletsprojects.com/donate diff --git a/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/RECORD b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/RECORD new file mode 100644 index 00000000..26bf81c1 --- /dev/null +++ b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/RECORD @@ -0,0 +1,14 @@ +MarkupSafe-3.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +MarkupSafe-3.0.2.dist-info/LICENSE.txt,sha256=RjHsDbX9kKVH4zaBcmTGeYIUM4FG-KyUtKV_lu6MnsQ,1503 +MarkupSafe-3.0.2.dist-info/METADATA,sha256=nhoabjupBG41j_JxPCJ3ylgrZ6Fx8oMCFbiLF9Kafqc,4067 +MarkupSafe-3.0.2.dist-info/RECORD,, +MarkupSafe-3.0.2.dist-info/WHEEL,sha256=62QJgqtUFevqILau0n0UncooEMoOyVCKVQitJpcuCig,101 +MarkupSafe-3.0.2.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +markupsafe/__init__.py,sha256=pREerPwvinB62tNCMOwqxBS2YHV6R52Wcq1d-rB4Z5o,13609 +markupsafe/__pycache__/__init__.cpython-312.pyc,, +markupsafe/__pycache__/_native.cpython-312.pyc,, +markupsafe/_native.py,sha256=2ptkJ40yCcp9kq3L1NqpgjfpZB-obniYKFFKUOkHh4Q,218 +markupsafe/_speedups.c,sha256=SglUjn40ti9YgQAO--OgkSyv9tXq9vvaHyVhQows4Ok,4353 +markupsafe/_speedups.cp312-win_amd64.pyd,sha256=sC88mCi7HJOQhbSSrdMPZfdCvi_VBfOzwkVuQ7V6T3M,13312 +markupsafe/_speedups.pyi,sha256=LSDmXYOefH4HVpAXuL8sl7AttLw0oXh1njVoVZp2wqQ,42 +markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL new file mode 100644 index 00000000..bd4c259e --- /dev/null +++ b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.2.0) +Root-Is-Purelib: false +Tag: cp312-cp312-win_amd64 + diff --git a/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt new file mode 100644 index 00000000..75bf7292 --- /dev/null +++ b/.venv/Lib/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/INSTALLER b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/LICENSE b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/LICENSE new file mode 100644 index 00000000..2f1b8e15 --- /dev/null +++ b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2017-2021 Ingy döt Net +Copyright (c) 2006-2016 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/METADATA b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/METADATA new file mode 100644 index 00000000..db029b77 --- /dev/null +++ b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/METADATA @@ -0,0 +1,46 @@ +Metadata-Version: 2.1 +Name: PyYAML +Version: 6.0.2 +Summary: YAML parser and emitter for Python +Home-page: https://pyyaml.org/ +Download-URL: https://pypi.org/project/PyYAML/ +Author: Kirill Simonov +Author-email: xi@resolvent.net +License: MIT +Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues +Project-URL: CI, https://github.com/yaml/pyyaml/actions +Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation +Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core +Project-URL: Source Code, https://github.com/yaml/pyyaml +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup +Requires-Python: >=3.8 +License-File: LICENSE + +YAML is a data serialization format designed for human readability +and interaction with scripting languages. PyYAML is a YAML parser +and emitter for Python. + +PyYAML features a complete YAML 1.1 parser, Unicode support, pickle +support, capable extension API, and sensible error messages. PyYAML +supports standard YAML tags and provides Python-specific tags that +allow to represent an arbitrary Python object. + +PyYAML is applicable for a broad range of tasks from complex +configuration files to object serialization and persistence. 
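The PyYAML metadata above describes the parser and emitter in prose only. As a quick orientation — a minimal sketch, not part of the vendored files, using a made-up document — the round trip through `yaml.safe_load` and `yaml.safe_dump` looks like this:

```python
import yaml

# A small made-up YAML document for illustration.
DOCUMENT = """\
service: example
replicas: 3
labels:
  - web
  - internal
"""

config = yaml.safe_load(DOCUMENT)   # parse into plain Python objects (dict/list/str/int)
assert config["replicas"] == 3

emitted = yaml.safe_dump(config)    # emit the same structure back out as YAML text
print(emitted)
```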
diff --git a/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/RECORD b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/RECORD new file mode 100644 index 00000000..61653255 --- /dev/null +++ b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/RECORD @@ -0,0 +1,43 @@ +PyYAML-6.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 +PyYAML-6.0.2.dist-info/METADATA,sha256=9lwXqTOrXPts-jI2Lo5UwuaAYo0hiRA0BZqjch0WjAk,2106 +PyYAML-6.0.2.dist-info/RECORD,, +PyYAML-6.0.2.dist-info/WHEEL,sha256=c7SWG1_hRvc9HXHEkmWlTu1Jr4WpzRucfzqTP-_8q0s,102 +PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 +_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 +_yaml/__pycache__/__init__.cpython-312.pyc,, +yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311 +yaml/__pycache__/__init__.cpython-312.pyc,, +yaml/__pycache__/composer.cpython-312.pyc,, +yaml/__pycache__/constructor.cpython-312.pyc,, +yaml/__pycache__/cyaml.cpython-312.pyc,, +yaml/__pycache__/dumper.cpython-312.pyc,, +yaml/__pycache__/emitter.cpython-312.pyc,, +yaml/__pycache__/error.cpython-312.pyc,, +yaml/__pycache__/events.cpython-312.pyc,, +yaml/__pycache__/loader.cpython-312.pyc,, +yaml/__pycache__/nodes.cpython-312.pyc,, +yaml/__pycache__/parser.cpython-312.pyc,, +yaml/__pycache__/reader.cpython-312.pyc,, +yaml/__pycache__/representer.cpython-312.pyc,, +yaml/__pycache__/resolver.cpython-312.pyc,, +yaml/__pycache__/scanner.cpython-312.pyc,, +yaml/__pycache__/serializer.cpython-312.pyc,, +yaml/__pycache__/tokens.cpython-312.pyc,, +yaml/_yaml.cp312-win_amd64.pyd,sha256=Bx7e_LEQx7cnd1_A9_nClp3X77g-_Lw1aoAAtYZbwWk,263680 +yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 +yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 +yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 +yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837 +yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006 +yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533 +yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445 +yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061 +yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440 +yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495 +yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794 +yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190 +yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004 +yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279 +yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165 +yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573 diff --git a/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/WHEEL b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/WHEEL new file mode 100644 index 00000000..a4e7d83d --- /dev/null +++ b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.44.0) +Root-Is-Purelib: false +Tag: cp312-cp312-win_amd64 + diff --git a/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/top_level.txt b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/top_level.txt new file mode 100644 index 00000000..e6475e91 --- /dev/null +++ 
b/.venv/Lib/site-packages/PyYAML-6.0.2.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_yaml +yaml diff --git a/.venv/Lib/site-packages/_yaml/__init__.py b/.venv/Lib/site-packages/_yaml/__init__.py new file mode 100644 index 00000000..7baa8c4b --- /dev/null +++ b/.venv/Lib/site-packages/_yaml/__init__.py @@ -0,0 +1,33 @@ +# This is a stub package designed to roughly emulate the _yaml +# extension module, which previously existed as a standalone module +# and has been moved into the `yaml` package namespace. +# It does not perfectly mimic its old counterpart, but should get +# close enough for anyone who's relying on it even when they shouldn't. +import yaml + +# in some circumstances, the yaml module we imoprted may be from a different version, so we need +# to tread carefully when poking at it here (it may not have the attributes we expect) +if not getattr(yaml, '__with_libyaml__', False): + from sys import version_info + + exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError + raise exc("No module named '_yaml'") +else: + from yaml._yaml import * + import warnings + warnings.warn( + 'The _yaml extension module is now located at yaml._yaml' + ' and its location is subject to change. To use the' + ' LibYAML-based parser and emitter, import from `yaml`:' + ' `from yaml import CLoader as Loader, CDumper as Dumper`.', + DeprecationWarning + ) + del warnings + # Don't `del yaml` here because yaml is actually an existing + # namespace member of _yaml. + +__name__ = '_yaml' +# If the module is top-level (i.e. not a part of any specific package) +# then the attribute should be set to ''. +# https://docs.python.org/3.8/library/types.html +__package__ = '' diff --git a/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/INSTALLER b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/LICENSE b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/LICENSE new file mode 100644 index 00000000..b7af5ef2 --- /dev/null +++ b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2018 Anthony Sottile + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
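The `_yaml` stub above exists only to re-export `yaml._yaml` and to warn that the LibYAML-backed classes should now be imported from `yaml` itself. A minimal sketch of that import, with the usual pure-Python fallback for environments where the C extension was not built (the fallback branch is the conventional pattern, not text from the stub):

```python
import yaml

# Use the LibYAML-backed loader/dumper when the extension is available,
# otherwise fall back to the pure-Python implementations.
try:
    from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
    from yaml import Loader, Dumper

data = yaml.load("key: value", Loader=Loader)   # -> {'key': 'value'}
text = yaml.dump(data, Dumper=Dumper)
```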
diff --git a/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/METADATA b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/METADATA new file mode 100644 index 00000000..2fec9a5d --- /dev/null +++ b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/METADATA @@ -0,0 +1,301 @@ +Metadata-Version: 2.1 +Name: cfgv +Version: 3.4.0 +Summary: Validate configuration and produce human readable error messages. +Home-page: https://github.com/asottile/cfgv +Author: Anthony Sottile +Author-email: asottile@umich.edu +License: MIT +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE + +[![build status](https://github.com/asottile/cfgv/actions/workflows/main.yml/badge.svg)](https://github.com/asottile/cfgv/actions/workflows/main.yml) +[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/asottile/cfgv/main.svg)](https://results.pre-commit.ci/latest/github/asottile/cfgv/main) + +cfgv +==== + +Validate configuration and produce human readable error messages. + +## Installation + +```bash +pip install cfgv +``` + +## Sample error messages + +These are easier to see by example. Here's an example where I typo'd `true` +in a [pre-commit](https://pre-commit.com) configuration. + +``` +pre_commit.clientlib.InvalidConfigError: +==> File /home/asottile/workspace/pre-commit/.pre-commit-config.yaml +==> At Config() +==> At key: repos +==> At Repository(repo='https://github.com/pre-commit/pre-commit-hooks') +==> At key: hooks +==> At Hook(id='flake8') +==> At key: always_run +=====> Expected bool got str +``` + +## API + +### `cfgv.validate(value, schema)` + +Perform validation on the schema: +- raises `ValidationError` on failure +- returns the value on success (for convenience) + +### `cfgv.apply_defaults(value, schema)` + +Returns a new value which sets all missing optional values to their defaults. + +### `cfgv.remove_defaults(value, schema)` + +Returns a new value which removes all optional values that are set to their +defaults. + +### `cfgv.load_from_filename(filename, schema, load_strategy, exc_tp=ValidationError)` + +Load a file given the `load_strategy`. Reraise any errors as `exc_tp`. All +defaults will be populated in the resulting value. + +Most useful when used with `functools.partial` as follows: + +```python +load_my_cfg = functools.partial( + cfgv.load_from_filename, + schema=MY_SCHEMA, + load_strategy=json.loads, + exc_tp=MyError, +) +``` + +## Making a schema + +A schema validates a container -- `cfgv` provides `Map` and `Array` for +most normal cases. + +### writing your own schema container + +If the built-in containers below don't quite satisfy your usecase, you can +always write your own. 
Containers use the following interface: + +```python +class Container(object): + def check(self, v): + """check the passed in value (do not modify `v`)""" + + def apply_defaults(self, v): + """return a new value with defaults applied (do not modify `v`)""" + + def remove_defaults(self, v): + """return a new value with defaults removed (do not modify `v`)""" +``` + +### `Map(object_name, id_key, *items)` + +The most basic building block for creating a schema is a `Map` + +- `object_name`: will be displayed in error messages +- `id_key`: will be used to identify the object in error messages. Set to + `None` if there is no identifying key for the object. +- `items`: validator objects such as `Required` or `Optional` + +Consider the following schema: + +```python +Map( + 'Repo', 'url', + Required('url', check_any), +) +``` + +In an error message, the map may be displayed as: + +- `Repo(url='https://github.com/pre-commit/pre-commit')` +- `Repo(url=MISSING)` (if the key is not present) + +### `Array(of, allow_empty=True)` + +Used to nest maps inside of arrays. For arrays of scalars, see `check_array`. + +- `of`: A `Map` / `Array` or other sub-schema. +- `allow_empty`: when `False`, `Array` will ensure at least one element. + +When validated, this will check that each element adheres to the sub-schema. + +## Validator objects + +Validator objects are used to validate key-value-pairs of a `Map`. + +### writing your own validator + +If the built-in validators below don't quite satisfy your usecase, you can +always write your own. Validators use the following interface: + +```python +class Validator(object): + def check(self, dct): + """check that your specific key has the appropriate value in `dct`""" + + def apply_default(self, dct): + """modify `dct` and set the default value if it is missing""" + + def remove_default(self, dct): + """modify `dct` and remove the default value if it is present""" +``` + +It may make sense to _borrow_ functions from the built in validators. They +additionally use the following interface(s): + +- `self.key`: the key to check +- `self.check_fn`: the [check function](#check-functions) +- `self.default`: a default value to set. + +### `Required(key, check_fn)` + +Ensure that a key is present in a `Map` and adheres to the +[check function](#check-functions). + +### `RequiredRecurse(key, schema)` + +Similar to `Required`, but uses a [schema](#making-a-schema). + +### `Optional(key, check_fn, default)` + +If a key is present, check that it adheres to the +[check function](#check-functions). + +- `apply_defaults` will set the `default` if it is not present. +- `remove_defaults` will remove the value if it is equal to `default`. + +### `OptionalRecurse(key, schema, default)` + +Similar to `Optional` but uses a [schema](#making-a-schema). + +- `apply_defaults` will set the `default` if it is not present and then + validate it with the schema. +- `remove_defaults` will remove defaults using the schema, and then remove the + value it if it is equal to `default`. + +### `OptionalNoDefault(key, check_fn)` + +Like `Optional`, but does not `apply_defaults` or `remove_defaults`. + +### `Conditional(key, check_fn, condition_key, condition_value, ensure_absent=False)` + +- If `condition_key` is equal to the `condition_value`, the specific `key` +will be checked using the [check function](#check-functions). +- If `ensure_absent` is `True` and the condition check fails, the `key` will +be checked for absense. 
+ +Note that the `condition_value` is checked for equality, so any object +implementing `__eq__` may be used. A few are provided out of the box +for this purpose, see [equality helpers](#equality-helpers). + +### `ConditionalOptional(key, check_fn, default, condition_key, condition_value, ensure_absent=False)` + +Similar to ``Conditional`` and ``Optional``. + +### `ConditionalRecurse(key, schema, condition_key, condition_value, ensure_absent=True)` + +Similar to `Conditional`, but uses a [schema](#making-a-schema). + +### `NoAdditionalKeys(keys)` + +Use in a mapping to ensure that only the `keys` specified are present. + +## Equality helpers + +Equality helpers at the very least implement `__eq__` for their behaviour. + +They may also implement `def describe_opposite(self):` for use in the +`ensure_absent=True` error message (otherwise, the `__repr__` will be used). + +### `Not(val)` + +Returns `True` if the value is not equal to `val`. + +### `In(*values)` + +Returns `True` if the value is contained in `values`. + +### `NotIn(*values)` + +Returns `True` if the value is not contained in `values`. + +## Check functions + +A number of check functions are provided out of the box. + +A check function takes a single parameter, the `value`, and either raises a +`ValidationError` or returns nothing. + +### `check_any(_)` + +A noop check function. + +### `check_type(tp, typename=None)` + +Returns a check function to check for a specific type. Setting `typename` +will replace the type's name in the error message. + +For example: + +```python +Required('key', check_type(int)) +# 'Expected bytes' in both python2 and python3. +Required('key', check_type(bytes, typename='bytes')) +``` + +Several type checking functions are provided out of the box: + +- `check_bool` +- `check_bytes` +- `check_int` +- `check_string` +- `check_text` + +### `check_one_of(possible)` + +Returns a function that checks that the value is contained in `possible`. + +For example: + +```python +Required('language', check_one_of(('javascript', 'python', 'ruby'))) +``` + +### `check_regex(v)` + +Ensures that `v` is a valid python regular expression. + +### `check_array(inner_check)` + +Returns a function that checks that a value is a sequence and that each +value in that sequence adheres to the `inner_check`. + +For example: + +```python +Required('args', check_array(check_string)) +``` + +### `check_and(*fns)` + +Returns a function that performs multiple checks on a value. 
+ +For example: + +```python +Required('language', check_and(check_string, my_check_language)) +``` diff --git a/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/RECORD b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/RECORD new file mode 100644 index 00000000..5cf44223 --- /dev/null +++ b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/cfgv.cpython-312.pyc,, +cfgv-3.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cfgv-3.4.0.dist-info/LICENSE,sha256=Afw_gDGmcrP119isJi5DLz6oEoCfVpfWvFsnC_ZEZWE,1059 +cfgv-3.4.0.dist-info/METADATA,sha256=bknF55QrW050DW54BlRiFw-2hDgnaQHhbjoNS1JvMXw,8545 +cfgv-3.4.0.dist-info/RECORD,, +cfgv-3.4.0.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110 +cfgv-3.4.0.dist-info/top_level.txt,sha256=B_oEtBRII3ENMX1OrlRFqpU4iO4OXC7_nihlUOYiK2c,5 +cfgv.py,sha256=k-EgVAOFdu-lmZxOQ5E1NHD3yVxMs7PXDAzIE9VgCAk,12220 diff --git a/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/WHEEL b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/WHEEL new file mode 100644 index 00000000..9d8f872b --- /dev/null +++ b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/top_level.txt b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/top_level.txt new file mode 100644 index 00000000..db0e8296 --- /dev/null +++ b/.venv/Lib/site-packages/cfgv-3.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +cfgv diff --git a/.venv/Lib/site-packages/cfgv.py b/.venv/Lib/site-packages/cfgv.py new file mode 100644 index 00000000..27ddd98f --- /dev/null +++ b/.venv/Lib/site-packages/cfgv.py @@ -0,0 +1,416 @@ +from __future__ import annotations + +import collections +import contextlib +import os.path +import re +import sys + + +class ValidationError(ValueError): + def __init__(self, error_msg, ctx=None): + super().__init__(error_msg) + self.error_msg = error_msg + self.ctx = ctx + + def __str__(self): + out = '\n' + err = self + while err.ctx is not None: + out += f'==> {err.ctx}\n' + err = err.error_msg + out += f'=====> {err.error_msg}' + return out + + +MISSING = collections.namedtuple('Missing', ())() +type(MISSING).__repr__ = lambda self: 'MISSING' + + +@contextlib.contextmanager +def validate_context(msg): + try: + yield + except ValidationError as e: + _, _, tb = sys.exc_info() + raise ValidationError(e, ctx=msg).with_traceback(tb) from None + + +@contextlib.contextmanager +def reraise_as(tp): + try: + yield + except ValidationError as e: + _, _, tb = sys.exc_info() + raise tp(e).with_traceback(tb) from None + + +def _dct_noop(self, dct): + pass + + +def _check_optional(self, dct): + if self.key not in dct: + return + with validate_context(f'At key: {self.key}'): + self.check_fn(dct[self.key]) + + +def _apply_default_optional(self, dct): + dct.setdefault(self.key, self.default) + + +def _remove_default_optional(self, dct): + if dct.get(self.key, MISSING) == self.default: + del dct[self.key] + + +def _require_key(self, dct): + if self.key not in dct: + raise ValidationError(f'Missing required key: {self.key}') + + +def _check_required(self, dct): + _require_key(self, dct) + _check_optional(self, dct) + + +@property +def _check_fn_recurse(self): + def check_fn(val): + validate(val, self.schema) + return check_fn + + +def _apply_default_required_recurse(self, dct): + dct[self.key] = apply_defaults(dct[self.key], self.schema) + + +def 
_remove_default_required_recurse(self, dct): + dct[self.key] = remove_defaults(dct[self.key], self.schema) + + +def _apply_default_optional_recurse(self, dct): + if self.key not in dct: + _apply_default_optional(self, dct) + _apply_default_required_recurse(self, dct) + + +def _remove_default_optional_recurse(self, dct): + if self.key in dct: + _remove_default_required_recurse(self, dct) + _remove_default_optional(self, dct) + + +def _get_check_conditional(inner): + def _check_conditional(self, dct): + if dct.get(self.condition_key, MISSING) == self.condition_value: + inner(self, dct) + elif ( + self.condition_key in dct and + self.ensure_absent and self.key in dct + ): + if hasattr(self.condition_value, 'describe_opposite'): + explanation = self.condition_value.describe_opposite() + else: + explanation = f'is not {self.condition_value!r}' + raise ValidationError( + f'Expected {self.key} to be absent when {self.condition_key} ' + f'{explanation}, found {self.key}: {dct[self.key]!r}', + ) + return _check_conditional + + +def _apply_default_conditional_optional(self, dct): + if dct.get(self.condition_key, MISSING) == self.condition_value: + _apply_default_optional(self, dct) + + +def _remove_default_conditional_optional(self, dct): + if dct.get(self.condition_key, MISSING) == self.condition_value: + _remove_default_optional(self, dct) + + +def _apply_default_conditional_recurse(self, dct): + if dct.get(self.condition_key, MISSING) == self.condition_value: + _apply_default_required_recurse(self, dct) + + +def _remove_default_conditional_recurse(self, dct): + if dct.get(self.condition_key, MISSING) == self.condition_value: + _remove_default_required_recurse(self, dct) + + +def _no_additional_keys_check(self, dct): + extra = sorted(set(dct) - set(self.keys)) + if extra: + extra_s = ', '.join(str(x) for x in extra) + keys_s = ', '.join(str(x) for x in self.keys) + raise ValidationError( + f'Additional keys found: {extra_s}. 
' + f'Only these keys are allowed: {keys_s}', + ) + + +def _warn_additional_keys_check(self, dct): + extra = sorted(set(dct) - set(self.keys)) + if extra: + self.callback(extra, self.keys, dct) + + +Required = collections.namedtuple('Required', ('key', 'check_fn')) +Required.check = _check_required +Required.apply_default = _dct_noop +Required.remove_default = _dct_noop +RequiredRecurse = collections.namedtuple('RequiredRecurse', ('key', 'schema')) +RequiredRecurse.check = _check_required +RequiredRecurse.check_fn = _check_fn_recurse +RequiredRecurse.apply_default = _apply_default_required_recurse +RequiredRecurse.remove_default = _remove_default_required_recurse +Optional = collections.namedtuple('Optional', ('key', 'check_fn', 'default')) +Optional.check = _check_optional +Optional.apply_default = _apply_default_optional +Optional.remove_default = _remove_default_optional +OptionalRecurse = collections.namedtuple( + 'OptionalRecurse', ('key', 'schema', 'default'), +) +OptionalRecurse.check = _check_optional +OptionalRecurse.check_fn = _check_fn_recurse +OptionalRecurse.apply_default = _apply_default_optional_recurse +OptionalRecurse.remove_default = _remove_default_optional_recurse +OptionalNoDefault = collections.namedtuple( + 'OptionalNoDefault', ('key', 'check_fn'), +) +OptionalNoDefault.check = _check_optional +OptionalNoDefault.apply_default = _dct_noop +OptionalNoDefault.remove_default = _dct_noop +Conditional = collections.namedtuple( + 'Conditional', + ('key', 'check_fn', 'condition_key', 'condition_value', 'ensure_absent'), +) +Conditional.__new__.__defaults__ = (False,) +Conditional.check = _get_check_conditional(_check_required) +Conditional.apply_default = _dct_noop +Conditional.remove_default = _dct_noop +ConditionalOptional = collections.namedtuple( + 'ConditionalOptional', + ( + 'key', 'check_fn', 'default', 'condition_key', 'condition_value', + 'ensure_absent', + ), +) +ConditionalOptional.__new__.__defaults__ = (False,) +ConditionalOptional.check = _get_check_conditional(_check_optional) +ConditionalOptional.apply_default = _apply_default_conditional_optional +ConditionalOptional.remove_default = _remove_default_conditional_optional +ConditionalRecurse = collections.namedtuple( + 'ConditionalRecurse', + ('key', 'schema', 'condition_key', 'condition_value', 'ensure_absent'), +) +ConditionalRecurse.__new__.__defaults__ = (False,) +ConditionalRecurse.check = _get_check_conditional(_check_required) +ConditionalRecurse.check_fn = _check_fn_recurse +ConditionalRecurse.apply_default = _apply_default_conditional_recurse +ConditionalRecurse.remove_default = _remove_default_conditional_recurse +NoAdditionalKeys = collections.namedtuple('NoAdditionalKeys', ('keys',)) +NoAdditionalKeys.check = _no_additional_keys_check +NoAdditionalKeys.apply_default = _dct_noop +NoAdditionalKeys.remove_default = _dct_noop +WarnAdditionalKeys = collections.namedtuple( + 'WarnAdditionalKeys', ('keys', 'callback'), +) +WarnAdditionalKeys.check = _warn_additional_keys_check +WarnAdditionalKeys.apply_default = _dct_noop +WarnAdditionalKeys.remove_default = _dct_noop + + +class Map(collections.namedtuple('Map', ('object_name', 'id_key', 'items'))): + __slots__ = () + + def __new__(cls, object_name, id_key, *items): + return super().__new__(cls, object_name, id_key, items) + + def check(self, v): + if not isinstance(v, dict): + raise ValidationError( + f'Expected a {self.object_name} map but got a ' + f'{type(v).__name__}', + ) + if self.id_key is None: + context = f'At {self.object_name}()' + else: + 
key_v_s = v.get(self.id_key, MISSING) + context = f'At {self.object_name}({self.id_key}={key_v_s!r})' + with validate_context(context): + for item in self.items: + item.check(v) + + def apply_defaults(self, v): + ret = v.copy() + for item in self.items: + item.apply_default(ret) + return ret + + def remove_defaults(self, v): + ret = v.copy() + for item in self.items: + item.remove_default(ret) + return ret + + +class Array(collections.namedtuple('Array', ('of', 'allow_empty'))): + __slots__ = () + + def __new__(cls, of, allow_empty=True): + return super().__new__(cls, of=of, allow_empty=allow_empty) + + def check(self, v): + check_array(check_any)(v) + if not self.allow_empty and not v: + raise ValidationError( + f"Expected at least 1 '{self.of.object_name}'", + ) + for val in v: + validate(val, self.of) + + def apply_defaults(self, v): + return [apply_defaults(val, self.of) for val in v] + + def remove_defaults(self, v): + return [remove_defaults(val, self.of) for val in v] + + +class Not(collections.namedtuple('Not', ('val',))): + __slots__ = () + + def describe_opposite(self): + return f'is {self.val!r}' + + def __eq__(self, other): + return other is not MISSING and other != self.val + + +class NotIn(collections.namedtuple('NotIn', ('values',))): + __slots__ = () + + def __new__(cls, *values): + return super().__new__(cls, values=values) + + def describe_opposite(self): + return f'is any of {self.values!r}' + + def __eq__(self, other): + return other is not MISSING and other not in self.values + + +class In(collections.namedtuple('In', ('values',))): + __slots__ = () + + def __new__(cls, *values): + return super().__new__(cls, values=values) + + def describe_opposite(self): + return f'is not any of {self.values!r}' + + def __eq__(self, other): + return other is not MISSING and other in self.values + + +def check_any(_): + pass + + +def check_type(tp, typename=None): + def check_type_fn(v): + if not isinstance(v, tp): + typename_s = typename or tp.__name__ + raise ValidationError( + f'Expected {typename_s} got {type(v).__name__}', + ) + return check_type_fn + + +check_bool = check_type(bool) +check_bytes = check_type(bytes) +check_int = check_type(int) +check_string = check_type(str, typename='string') +check_text = check_type(str, typename='text') + + +def check_one_of(possible): + def check_one_of_fn(v): + if v not in possible: + possible_s = ', '.join(str(x) for x in sorted(possible)) + raise ValidationError( + f'Expected one of {possible_s} but got: {v!r}', + ) + return check_one_of_fn + + +def check_regex(v): + try: + re.compile(v) + except re.error: + raise ValidationError(f'{v!r} is not a valid python regex') + + +def check_array(inner_check): + def check_array_fn(v): + if not isinstance(v, (list, tuple)): + raise ValidationError( + f'Expected array but got {type(v).__name__!r}', + ) + + for i, val in enumerate(v): + with validate_context(f'At index {i}'): + inner_check(val) + return check_array_fn + + +def check_and(*fns): + def check(v): + for fn in fns: + fn(v) + return check + + +def validate(v, schema): + schema.check(v) + return v + + +def apply_defaults(v, schema): + return schema.apply_defaults(v) + + +def remove_defaults(v, schema): + return schema.remove_defaults(v) + + +def load_from_filename( + filename, + schema, + load_strategy, + exc_tp=ValidationError, + *, + display_filename=None, +): + display_filename = display_filename or filename + with reraise_as(exc_tp): + if not os.path.isfile(filename): + raise ValidationError(f'{display_filename} is not a file') + + with 
validate_context(f'File {display_filename}'): + try: + with open(filename, encoding='utf-8') as f: + contents = f.read() + except UnicodeDecodeError as e: + raise ValidationError(str(e)) + + try: + data = load_strategy(contents) + except Exception as e: + raise ValidationError(str(e)) + + validate(data, schema) + return apply_defaults(data, schema) diff --git a/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/INSTALLER b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/LICENSE b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/LICENSE new file mode 100644 index 00000000..925a7512 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 MinJae Kwon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
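cfgv's README and full source are vendored above. As a small orientation sketch (the `Repo` schema and the sample dictionaries are made up for illustration, not part of this repository), the documented `Map`, `Required`, and `Optional` pieces compose like this:

```python
import cfgv

# A toy schema: a required string 'url' and an optional string 'rev' defaulting to 'HEAD'.
REPO_SCHEMA = cfgv.Map(
    'Repo', 'url',
    cfgv.Required('url', cfgv.check_string),
    cfgv.Optional('rev', cfgv.check_string, 'HEAD'),
)

repo = cfgv.validate({'url': 'https://example.com/repo'}, REPO_SCHEMA)
repo = cfgv.apply_defaults(repo, REPO_SCHEMA)    # fills in rev='HEAD'

try:
    cfgv.validate({'rev': 'v1.0'}, REPO_SCHEMA)  # missing the required 'url' key
except cfgv.ValidationError as exc:
    print(exc)  # ==> At Repo(url=MISSING) / =====> Missing required key: url
```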
diff --git a/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/METADATA b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/METADATA new file mode 100644 index 00000000..8962c211 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/METADATA @@ -0,0 +1,117 @@ +Metadata-Version: 2.1 +Name: diagrams +Version: 0.24.4 +Summary: Diagram as Code +Home-page: https://diagrams.mingrammer.com +License: MIT +Author: mingrammer +Author-email: mingrammer@gmail.com +Requires-Python: >=3.9,<4.0 +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Dist: graphviz (>=0.13.2,<0.21.0) +Requires-Dist: jinja2 (>=2.10,<4.0) +Requires-Dist: pre-commit (>=4.0.1,<5.0.0) +Requires-Dist: typed-ast (>=1.5.5,<2.0.0) ; python_version < "3.8" +Project-URL: Repository, https://github.com/mingrammer/diagrams +Description-Content-Type: text/markdown + +![diagrams logo](assets/img/diagrams.png) + +# Diagrams + +[![license](https://img.shields.io/badge/license-MIT-blue.svg)](/LICENSE) +[![pypi version](https://badge.fury.io/py/diagrams.svg)](https://badge.fury.io/py/diagrams) +![python version](https://img.shields.io/badge/python-%3E%3D%203.9-blue?logo=python) +![Run tests](https://github.com/mingrammer/diagrams/workflows/Run%20tests/badge.svg?branch=master) +[![todos](https://badgen.net/https/api.tickgit.com/badgen/github.com/mingrammer/diagrams?label=todos)](https://www.tickgit.com/browse?repo=github.com/mingrammer/diagrams) +![contributors](https://img.shields.io/github/contributors/mingrammer/diagrams) + +Buy Me A Coffee + +**Diagram as Code**. + +Diagrams lets you draw the cloud system architecture **in Python code**. It was born for **prototyping** a new system architecture design without any design tools. You can also describe or visualize the existing system architecture as well. Diagrams currently supports main major providers including: `AWS`, `Azure`, `GCP`, `Kubernetes`, `Alibaba Cloud`, `Oracle Cloud` etc... It also supports `On-Premises` nodes, `SaaS` and major `Programming` frameworks and languages. + +**Diagram as Code** also allows you to **track** the architecture diagram changes in any **version control** system. + +> NOTE: It does not control any actual cloud resources nor does it generate cloud formation or terraform code. It is just for drawing the cloud system architecture diagrams. 
+ +## Providers + +![aws provider](https://img.shields.io/badge/AWS-orange?logo=amazon-aws&color=ff9900) +![azure provider](https://img.shields.io/badge/Azure-orange?logo=microsoft-azure&color=0089d6) +![gcp provider](https://img.shields.io/badge/GCP-orange?logo=google-cloud&color=4285f4) +![ibm provider](https://img.shields.io/badge/IBM-orange?logo=ibm&color=052FAD) +![kubernetes provider](https://img.shields.io/badge/Kubernetes-orange?logo=kubernetes&color=326ce5) +![alibaba cloud provider](https://img.shields.io/badge/AlibabaCloud-orange?logo=alibaba-cloud&color=ff6a00) +![oracle cloud provider](https://img.shields.io/badge/OracleCloud-orange?logo=oracle&color=f80000) +![openstack provider](https://img.shields.io/badge/OpenStack-orange?logo=openstack&color=da1a32) +![firebase provider](https://img.shields.io/badge/Firebase-orange?logo=firebase&color=FFCA28) +![digital ocean provider](https://img.shields.io/badge/DigitalOcean-0080ff?logo=digitalocean&color=0080ff) +![elastic provider](https://img.shields.io/badge/Elastic-orange?logo=elastic&color=005571) +![outscale provider](https://img.shields.io/badge/OutScale-orange?color=5f87bf) +![on premises provider](https://img.shields.io/badge/OnPremises-orange?color=5f87bf) +![generic provider](https://img.shields.io/badge/Generic-orange?color=5f87bf) +![programming provider](https://img.shields.io/badge/Programming-orange?color=5f87bf) +![saas provider](https://img.shields.io/badge/SaaS-orange?color=5f87bf) +![c4 provider](https://img.shields.io/badge/C4-orange?color=5f87bf) + +## Getting Started + +It requires **Python 3.9** or higher, check your Python version first. + +It uses [Graphviz](https://www.graphviz.org/) to render the diagram, so you need to [install Graphviz](https://graphviz.gitlab.io/download/) to use **diagrams**. After installing graphviz (or already have it), install the **diagrams**. + +> macOS users can download the Graphviz via `brew install graphviz` if you're using [Homebrew](https://brew.sh). + +```shell +# using pip (pip3) +$ pip install diagrams + +# using pipenv +$ pipenv install diagrams + +# using poetry +$ poetry add diagrams +``` + +You can start with [quick start](https://diagrams.mingrammer.com/docs/getting-started/installation#quick-start). Check out [guides](https://diagrams.mingrammer.com/docs/guides/diagram) for more details, and you can find all available nodes list in [here](https://diagrams.mingrammer.com/docs/nodes/aws). + +## Examples + +| Event Processing | Stateful Architecture | Advanced Web Service | +| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | +| ![event processing](https://diagrams.mingrammer.com/img/event_processing_diagram.png) | ![stateful architecture](https://diagrams.mingrammer.com/img/stateful_architecture_diagram.png) | ![advanced web service with on-premises](https://diagrams.mingrammer.com/img/advanced_web_service_with_on-premises.png) | + +You can find all the examples on the [examples](https://diagrams.mingrammer.com/docs/getting-started/examples) page. + +## Contributing + +To contribute to diagram, check out [contribution guidelines](CONTRIBUTING.md). + +> Let me know if you are using diagrams! I'll add you in showcase page. (I'm working on it!) :) + +## Who uses it? + +[Apache Airflow](https://github.com/apache/airflow) is the most popular data workflow Orchestrator. 
Airflow uses Diagrams to generate architecture diagrams in their documentation. + +[Cloudiscovery](https://github.com/Cloud-Architects/cloudiscovery) helps you to analyze resources in your cloud (AWS/GCP/Azure/Alibaba/IBM) account. It allows you to create a diagram of analyzed cloud resource map based on this Diagrams library, so you can draw your existing cloud infrastructure with Cloudiscovery. + +[Airflow Diagrams](https://github.com/feluelle/airflow-diagrams) is an Airflow plugin that aims to easily visualise your Airflow DAGs on service level from providers like AWS, GCP, Azure, etc. via diagrams. + +[KubeDiagrams](https://github.com/philippemerle/KubeDiagrams) is a tool to generate Kubernetes architecture diagrams from Kubernetes manifest files, kustomization files, Helm charts, and actual cluster state. [KubeDiagrams](https://github.com/philippemerle/KubeDiagrams) supports all Kubernetes built-in resources, any custom resources, and label-based resource clustering. + +## Other languages + +- If you are familiar with Go, you can use [go-diagrams](https://github.com/blushft/go-diagrams) as well. + +## License + +[MIT](LICENSE) + diff --git a/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/RECORD b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/RECORD new file mode 100644 index 00000000..b462c886 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/RECORD @@ -0,0 +1,2314 @@ +../../Scripts/diagrams.exe,sha256=ucFTL0hdMKWpYTBlrgp5IboEPBHwy-ILc36dnd4x9Zw,108389 +diagrams-0.24.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +diagrams-0.24.4.dist-info/LICENSE,sha256=f_xnMQ-t-cTh8tPJW7OoOxF6kwVv4dQO8lkoZ3tMWjc,1068 +diagrams-0.24.4.dist-info/METADATA,sha256=83jUDM4g9Z-9SQTS43_iRj3huoNNZQJMqmMTAjek9ec,7290 +diagrams-0.24.4.dist-info/RECORD,, +diagrams-0.24.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +diagrams-0.24.4.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88 +diagrams-0.24.4.dist-info/entry_points.txt,sha256=vh4fikzwt83E4rv1ORPC2Bs5TtojvwmitBTGX46dOi0,46 +diagrams/__init__.py,sha256=Q11M99i_w2vW95mGeCfKLznnjEo7mIHx3E4D9VjAbVg,18956 +diagrams/__pycache__/__init__.cpython-312.pyc,, +diagrams/alibabacloud/__init__.py,sha256=xKjQ8Izu8AbT8D1U-ZQA4qAtMxgxaxGAn9i7oSLhhJM,299 +diagrams/alibabacloud/__pycache__/__init__.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/analytics.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/application.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/communication.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/compute.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/database.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/iot.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/network.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/security.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/storage.cpython-312.pyc,, +diagrams/alibabacloud/__pycache__/web.cpython-312.pyc,, +diagrams/alibabacloud/analytics.py,sha256=54aVfGpXx1TTFEQ6Au9Hzc4Xl5oeZHW9oS2UaUm0iws,556 +diagrams/alibabacloud/application.py,sha256=btSi_UHgOEipDR5GRgqvA7kMSczZID5pXxNvk3YVrcw,1374 +diagrams/alibabacloud/communication.py,sha256=xagZi0P9q2kMQaGwyeYo7pyqaRWF8eIRValRaXrDR7A,364 +diagrams/alibabacloud/compute.py,sha256=kOwrBuYB1I0RlNuc2yZA0R44tqm2qcnTu-5Dqnh1ji4,1714 +diagrams/alibabacloud/database.py,sha256=95KGpoIw7549x4kdaFFy4x1H_FQjhJR2My-E3tk99BU,1736 +diagrams/alibabacloud/iot.py,sha256=SY5TU4JwFRKe4VKO7076d4SikNIc4p8KTyt-aES-7Vg,483 
+diagrams/alibabacloud/network.py,sha256=QWWJyABjR6E4uIF8Wm0TWwiFWO3ST9ew8-WkLGrSkMc,942 +diagrams/alibabacloud/security.py,sha256=L3arnr5Q1WDvM2pgZiWX0bpcdtteER0-26YK7Z2n9ys,1663 +diagrams/alibabacloud/storage.py,sha256=EeMTiUI7zFb7sVBBchCGggsFR4kPwIcMdJVaOmiWfYg,965 +diagrams/alibabacloud/web.py,sha256=RgGOkTzPoDmMh5emi2BoQA_mX3zKGzqSXu37xTCPVE4,290 +diagrams/aws/__init__.py,sha256=CSchNSV_Sqik8sW1kMOKjF6XbTxz_xgRjcs7W9UkaBs,241 +diagrams/aws/__pycache__/__init__.cpython-312.pyc,, +diagrams/aws/__pycache__/analytics.cpython-312.pyc,, +diagrams/aws/__pycache__/ar.cpython-312.pyc,, +diagrams/aws/__pycache__/blockchain.cpython-312.pyc,, +diagrams/aws/__pycache__/business.cpython-312.pyc,, +diagrams/aws/__pycache__/compute.cpython-312.pyc,, +diagrams/aws/__pycache__/cost.cpython-312.pyc,, +diagrams/aws/__pycache__/database.cpython-312.pyc,, +diagrams/aws/__pycache__/devtools.cpython-312.pyc,, +diagrams/aws/__pycache__/enablement.cpython-312.pyc,, +diagrams/aws/__pycache__/enduser.cpython-312.pyc,, +diagrams/aws/__pycache__/engagement.cpython-312.pyc,, +diagrams/aws/__pycache__/game.cpython-312.pyc,, +diagrams/aws/__pycache__/general.cpython-312.pyc,, +diagrams/aws/__pycache__/integration.cpython-312.pyc,, +diagrams/aws/__pycache__/iot.cpython-312.pyc,, +diagrams/aws/__pycache__/management.cpython-312.pyc,, +diagrams/aws/__pycache__/media.cpython-312.pyc,, +diagrams/aws/__pycache__/migration.cpython-312.pyc,, +diagrams/aws/__pycache__/ml.cpython-312.pyc,, +diagrams/aws/__pycache__/mobile.cpython-312.pyc,, +diagrams/aws/__pycache__/network.cpython-312.pyc,, +diagrams/aws/__pycache__/quantum.cpython-312.pyc,, +diagrams/aws/__pycache__/robotics.cpython-312.pyc,, +diagrams/aws/__pycache__/satellite.cpython-312.pyc,, +diagrams/aws/__pycache__/security.cpython-312.pyc,, +diagrams/aws/__pycache__/storage.cpython-312.pyc,, +diagrams/aws/analytics.py,sha256=56rYJ7TphKPUbEduPiz8uyxFlN-xOK60En_l6LW9PDg,2293 +diagrams/aws/ar.py,sha256=vAPlsvbdSZCE4LXfB5YROZLcVUwIoCvnTolj68W56SE,265 +diagrams/aws/blockchain.py,sha256=-6x4xll1kVW_erQS6pVsI1Fm1ugQkSEHxzsCgRu-rT8,543 +diagrams/aws/business.py,sha256=wx0_V9spINWpEdpxl8KM1hZ21iH8MMkASUkjbZrPxyk,475 +diagrams/aws/compute.py,sha256=vtUZbusREL3MvOuWBLLR9C-xdH4-0lrnYRtGnyJWnOQ,3248 +diagrams/aws/cost.py,sha256=ifR-2E3JQxKLG7FKn6RbDahdwd5nprTGtN3pqjUeL3k,578 +diagrams/aws/database.py,sha256=xKsfLmQuH4fbBLiby9d6gqFcnNAwDLCd1E34v4V99m8,3012 +diagrams/aws/devtools.py,sha256=rQ6HKiEtw8LxmQgvjttlLsC5o1DOMudh25ZIFabJZtY,1073 +diagrams/aws/enablement.py,sha256=1Kerw0lmGMm59fTjtLlRd1Ft1-9rqqaEYCZ8Bx46Uqc,533 +diagrams/aws/enduser.py,sha256=dW_cAeJ229riIFR_Tg1ilGT3-bNF9pX3ABN9E4QSvE4,505 +diagrams/aws/engagement.py,sha256=UtPUFF1nWJvqpjY1VjCVMtKjuyaalWrOWmMHu2x0KeA,603 +diagrams/aws/game.py,sha256=J9otTg0uPXcybP0-Uj_pT7BI2s7TWKFUBusulLmgMho,283 +diagrams/aws/general.py,sha256=Uz8ccVnp7QspjvqqSE1CL-mHNGCrzCj4HI_lvczK0Z4,1683 +diagrams/aws/integration.py,sha256=eNXD_u3zA7hdK909BSqspweJYWktDbpJ66PuVZJ10mg,1979 +diagrams/aws/iot.py,sha256=RPYLHV42Bagl88uLHhZ8utX96TFJTzIF5-0NC98hvUU,3956 +diagrams/aws/management.py,sha256=qTLvdzbHIPHoht7dYgdfkM2WNgMwgsFNBF5pv--1PlY,5013 +diagrams/aws/media.py,sha256=DsKrJ-Qo5w1VbSD4VSWxy1oc_orN2EWueqW41-EEPu8,1134 +diagrams/aws/migration.py,sha256=NqAA3Bkv0sPefJYHq1rwW8dQxa9Od0rKqN7r8AYuZAA,1230 +diagrams/aws/ml.py,sha256=ZburqaDVk3yorRKZk_dJektHNAPhBSANdJwT-i3fNxM,1915 +diagrams/aws/mobile.py,sha256=-UKBck8-E1GbPK5HVtBrH-4u5HPo4X13eS3DNZ-LG1w,586 +diagrams/aws/network.py,sha256=KHVVJ9i0I4eITFL6_7B7Cc1tp5HPhy5-E82w5wWLXOQ,3264 
+diagrams/aws/quantum.py,sha256=7USBSic3iOxBbyqh23Jqf-1rXhBFOpoii-VEghw-HAg,316 +diagrams/aws/robotics.py,sha256=VYJiML0IcAS2_gFufVYogNjn5-dZb1E_BZ66aNZAIKI,670 +diagrams/aws/satellite.py,sha256=RdflihfZoYCvQFTbwkbKiUotJJiAQYsGU_VaOjOug3w,320 +diagrams/aws/security.py,sha256=1ngMglp5HZwiz36o8wfVDPxOz7HNsM-40XOxFPwdsOc,3967 +diagrams/aws/storage.py,sha256=vVKA83Jq7uBzJkl81t13YfazHpzqZrE0FH7fui_WcJk,2863 +diagrams/azure/__init__.py,sha256=ZzVTpQTc25PaaLuM6MnySAbsYIQIxcJmzt6O5uramoM,252 +diagrams/azure/__pycache__/__init__.cpython-312.pyc,, +diagrams/azure/__pycache__/analytics.cpython-312.pyc,, +diagrams/azure/__pycache__/compute.cpython-312.pyc,, +diagrams/azure/__pycache__/database.cpython-312.pyc,, +diagrams/azure/__pycache__/devops.cpython-312.pyc,, +diagrams/azure/__pycache__/general.cpython-312.pyc,, +diagrams/azure/__pycache__/identity.cpython-312.pyc,, +diagrams/azure/__pycache__/integration.cpython-312.pyc,, +diagrams/azure/__pycache__/iot.cpython-312.pyc,, +diagrams/azure/__pycache__/migration.cpython-312.pyc,, +diagrams/azure/__pycache__/ml.cpython-312.pyc,, +diagrams/azure/__pycache__/mobile.cpython-312.pyc,, +diagrams/azure/__pycache__/monitor.cpython-312.pyc,, +diagrams/azure/__pycache__/network.cpython-312.pyc,, +diagrams/azure/__pycache__/security.cpython-312.pyc,, +diagrams/azure/__pycache__/storage.cpython-312.pyc,, +diagrams/azure/__pycache__/web.cpython-312.pyc,, +diagrams/azure/analytics.py,sha256=hJXfa8J35ys1-rlEOyNvcwyQLKhYSvmMH8mV2MFsrfM,1092 +diagrams/azure/compute.py,sha256=6QEybzO0pA8K3E6FyZhgsn7w1sf3vYYI-dwXR9Hwaio,2376 +diagrams/azure/database.py,sha256=MDnhy3CiWjHwE0JzdZe1mPcrcLuunKjbFrQvsbPZ730,1931 +diagrams/azure/devops.py,sha256=PyBvQMEIjiJUyB4FvnxeRiZpZAAt-g7axRVh9G9nXK4,706 +diagrams/azure/general.py,sha256=IqWBOFF8E4aFJFgZJpIadak1Oibckw57b6jvoJbx1Wk,1773 +diagrams/azure/identity.py,sha256=5B_LZNp19kvCLLGT20o5nlpz1bJeIM4QPG-BTPTWNzE,1311 +diagrams/azure/integration.py,sha256=gU_E9mN6FizcUjPLQX76GzGXhS6NA6NiuwlIMEEie4I,1725 +diagrams/azure/iot.py,sha256=Ps6hHEFrBgyPdrsn2NK2SAIQX6Qb3E7d6xfdIpZ3N0E,900 +diagrams/azure/migration.py,sha256=Mswg3Rqf7tz_tcgL3Xs-H6n0wLvg-0e8jxHG7hLpnZw,576 +diagrams/azure/ml.py,sha256=5S-wMXNdK3mmE6dqKw9X5NdDCKglgPRTQxnKBAC9Xug,960 +diagrams/azure/mobile.py,sha256=v-W-S2Vskqe6NX1-zRtsUzo851tImtNZLIqnMSpUqgI,404 +diagrams/azure/monitor.py,sha256=nDrHUXKaP8kwMLVGVtdthSta3Z2Y6AJc8aSsQ_kB-Hs,414 +diagrams/azure/network.py,sha256=Asa6XEBCAsYVriiFUKrZvnIs8PHICOEI-VeFVW9f5wo,2247 +diagrams/azure/security.py,sha256=Z8_XZvBDFmVblNnG71KnOD8jRUekdhTQQfdO9eGhzRQ,691 +diagrams/azure/storage.py,sha256=BgvGpFiIxHQb7M3i_qxN7q1Gc3fjxH_1Nlu-2-2PFj4,1353 +diagrams/azure/web.py,sha256=qtTFXVIjIEI1p_hVQTvlP4HiToHoholukScBeZGGD6E,848 +diagrams/base/__init__.py,sha256=GTxqUK1T5s3tZkCENSoEDlf0bFsPOpJMp8DL1mldF7Y,207 +diagrams/base/__pycache__/__init__.cpython-312.pyc,, +diagrams/c4/__init__.py,sha256=AVXzqWZFGO_FBGNL-cbkae1FTDQgcptbiDZ82PZ-vR0,4157 +diagrams/c4/__pycache__/__init__.cpython-312.pyc,, +diagrams/custom/__init__.py,sha256=GAf20MzXGSClA9-S_l6hmyH4Tkku_qHvUc16ALJvZa4,420 +diagrams/custom/__pycache__/__init__.cpython-312.pyc,, +diagrams/digitalocean/__init__.py,sha256=Xdp2_VNd_eOML2SbazQ3KrAtKuxQB1fEkpeGpqXvPww,298 +diagrams/digitalocean/__pycache__/__init__.cpython-312.pyc,, +diagrams/digitalocean/__pycache__/compute.cpython-312.pyc,, +diagrams/digitalocean/__pycache__/database.cpython-312.pyc,, +diagrams/digitalocean/__pycache__/network.cpython-312.pyc,, +diagrams/digitalocean/__pycache__/storage.cpython-312.pyc,, 
+diagrams/digitalocean/compute.py,sha256=tHF6Kpk3WzlB-eO3x_YlM6vClejOXJR8AnrYcd9F-f4,692 +diagrams/digitalocean/database.py,sha256=E73PiB2oiW4lPAYIe4rbdBOK-Tw8Go0HO7-THH3DhDk,504 +diagrams/digitalocean/network.py,sha256=5hDG5jCL81GJkL_zrxps79IeGHGlx-XLthbHqyCBYRw,756 +diagrams/digitalocean/storage.py,sha256=sTKitKMz6YGMBDS4G7BKHAJmFiWd0f9UsgZdVQtaQJk,433 +diagrams/elastic/__init__.py,sha256=YWb5gcPPWPe4_WjW22yNk7A9_0-OAlo1dbqIpEVnWVU,253 +diagrams/elastic/__pycache__/__init__.cpython-312.pyc,, +diagrams/elastic/__pycache__/agent.cpython-312.pyc,, +diagrams/elastic/__pycache__/beats.cpython-312.pyc,, +diagrams/elastic/__pycache__/elasticsearch.cpython-312.pyc,, +diagrams/elastic/__pycache__/enterprisesearch.cpython-312.pyc,, +diagrams/elastic/__pycache__/observability.cpython-312.pyc,, +diagrams/elastic/__pycache__/orchestration.cpython-312.pyc,, +diagrams/elastic/__pycache__/saas.cpython-312.pyc,, +diagrams/elastic/__pycache__/security.cpython-312.pyc,, +diagrams/elastic/agent.py,sha256=T7UnE3y0goPT9NI29P7Wg5N9G-mjK3mSR3XD0pptMNA,401 +diagrams/elastic/beats.py,sha256=yD-K-Z7DJwEjROJg375bk2TqdzBhUxz3aHvYjB9yQSM,631 +diagrams/elastic/elasticsearch.py,sha256=hRReCNCGD5DgUJV6OTrTLiPkoKrWYkYY1Fs6C3eWt2w,1202 +diagrams/elastic/enterprisesearch.py,sha256=BoCXyA4329gU0xN9xqiP80E8yTIv53OB4rcck59-fCQ,584 +diagrams/elastic/observability.py,sha256=CceAtbdTIVbqPfoUVw-OrRnQrozn93ITdZVPEZlSE8A,508 +diagrams/elastic/orchestration.py,sha256=YlpByoX3dF1-AplLWDc-XmFjN0GX5sTdLUKX6k5tfTo,319 +diagrams/elastic/saas.py,sha256=kyhZ9wZsSu7QO5w_0lqUKLFSs6fZWuvD6I11LbsjO0w,286 +diagrams/elastic/security.py,sha256=bF74cLm1V3yLvS4_Q4BwqynJdLCgybgxWWr2n0h7gGw,408 +diagrams/firebase/__init__.py,sha256=tyP9QEgUZdw20dWKf52ZvM3hO27x1s0sxWwAktmULqI,266 +diagrams/firebase/__pycache__/__init__.cpython-312.pyc,, +diagrams/firebase/__pycache__/base.cpython-312.pyc,, +diagrams/firebase/__pycache__/develop.cpython-312.pyc,, +diagrams/firebase/__pycache__/extentions.cpython-312.pyc,, +diagrams/firebase/__pycache__/grow.cpython-312.pyc,, +diagrams/firebase/__pycache__/quality.cpython-312.pyc,, +diagrams/firebase/base.py,sha256=CylQpHlhQCxYztpM8I68O3_KK-oFb9Gpaggf5eV1z1c,245 +diagrams/firebase/develop.py,sha256=po34oOZlD0JLOhXaqCvC9wD5pkG7oLB960NFAKrv0F0,611 +diagrams/firebase/extentions.py,sha256=CJmXu9WDVAhdcFr1AK39DSneq4z2fzTQ5ot1l_titL8,273 +diagrams/firebase/grow.py,sha256=tWnxofCO9_HdwoE6UJ-H98KYJo-XLscSxYmualieGkk,674 +diagrams/firebase/quality.py,sha256=d43ShV3L1EHDwpLPhJu6ASqnqqIX1TIEmxoPGbumdLU,537 +diagrams/gcp/__init__.py,sha256=t1i8oaYV2liRae_8oLVRhXtOF9g0SsRFGVyMwy_Gzf4,244 +diagrams/gcp/__pycache__/__init__.cpython-312.pyc,, +diagrams/gcp/__pycache__/analytics.cpython-312.pyc,, +diagrams/gcp/__pycache__/api.cpython-312.pyc,, +diagrams/gcp/__pycache__/compute.cpython-312.pyc,, +diagrams/gcp/__pycache__/database.cpython-312.pyc,, +diagrams/gcp/__pycache__/devtools.cpython-312.pyc,, +diagrams/gcp/__pycache__/iot.cpython-312.pyc,, +diagrams/gcp/__pycache__/migration.cpython-312.pyc,, +diagrams/gcp/__pycache__/ml.cpython-312.pyc,, +diagrams/gcp/__pycache__/network.cpython-312.pyc,, +diagrams/gcp/__pycache__/operations.cpython-312.pyc,, +diagrams/gcp/__pycache__/security.cpython-312.pyc,, +diagrams/gcp/__pycache__/storage.cpython-312.pyc,, +diagrams/gcp/analytics.py,sha256=RBLOUMv058R2hOsgNmchAyYsI0ikGklTtsRLAVs4Slk,806 +diagrams/gcp/api.py,sha256=vvF547cXwpUzUol-MAFiAtU8o9Dn3XndW8meHEBplW0,331 +diagrams/gcp/compute.py,sha256=e6b9qvtTjCaqXOvXZd9SBu36e_7y1ov-X1e9rwvnomk,746 
+diagrams/gcp/database.py,sha256=tzusqYGKJlPF27MRbM6Icx_xuL0K6_7iOxXoLtIotzY,545 +diagrams/gcp/devtools.py,sha256=Cnj95PzELsSfuM5EZJ_uc9W2gKb7ck5eKw1L-y4oS-c,1208 +diagrams/gcp/iot.py,sha256=KfZeTZZbTU7dVTq53lkK-IpxeufY4kRWuDJmUEbKsbs,225 +diagrams/gcp/migration.py,sha256=Ph9zr9chfbvK_ImlcK6Bqt7BaBRZbt0bkniJfhfuJFY,269 +diagrams/gcp/ml.py,sha256=bFF2hY7WwUJb1_CCJ6pQODEvnv7sRh_Q5QGwPrndSIo,1611 +diagrams/gcp/network.py,sha256=cwWS161YecdC_xT_Qx-px3tY7OfR2tmeB0zP8-2OJUo,1274 +diagrams/gcp/operations.py,sha256=iTytF8--lc3NXcNSv67A-xtMiYtKb0-DFCewpH5SQuQ,314 +diagrams/gcp/security.py,sha256=YbtCgabwyJC1jHyhawmiCOyq6Yvx1FpJ2PsKdwiBP2E,646 +diagrams/gcp/storage.py,sha256=wTTk3MW5OxnxKUeuSkODTdW3ZfP3Ptti63fy9dQx2xw,380 +diagrams/generic/__init__.py,sha256=lmiA-__HzFSiVdb2JxUIONRBnyMvrDEq3NPaN-kHyIo,277 +diagrams/generic/__pycache__/__init__.cpython-312.pyc,, +diagrams/generic/__pycache__/blank.cpython-312.pyc,, +diagrams/generic/__pycache__/compute.cpython-312.pyc,, +diagrams/generic/__pycache__/database.cpython-312.pyc,, +diagrams/generic/__pycache__/device.cpython-312.pyc,, +diagrams/generic/__pycache__/network.cpython-312.pyc,, +diagrams/generic/__pycache__/os.cpython-312.pyc,, +diagrams/generic/__pycache__/place.cpython-312.pyc,, +diagrams/generic/__pycache__/storage.cpython-312.pyc,, +diagrams/generic/__pycache__/virtualization.cpython-312.pyc,, +diagrams/generic/blank.py,sha256=S9SSBYF-8NcKxO_fLYVkverTHJqX1yCvcQK9KRQHL3Q,240 +diagrams/generic/compute.py,sha256=45SOo_mZreTMRV6mlEeiFKBqZPCyg81BxBr1LQPloCI,246 +diagrams/generic/database.py,sha256=PrQuZ-u4RzUlFxs73FQP-_8oeMbs_OMP8xAm79B6AuQ,248 +diagrams/generic/device.py,sha256=SZzag6MFPwP-urYdEczh74-SZ2-vK3S8fZ0heZKfMy0,296 +diagrams/generic/network.py,sha256=68mUbBsm5BR2V_MzZE_bSsQOO5BJCFlVfXgWNfhfJU8,452 +diagrams/generic/os.py,sha256=1SFIFk-xgfYUa4xNgAo-CKvVHSZiiOjHKdgpOpbGYMc,656 +diagrams/generic/place.py,sha256=IeCent_2InovuhOW2py8iXm4sP82uYHycpfYzVCHW3s,250 +diagrams/generic/storage.py,sha256=hdybVUiVcomBzh-ZvzEL7DSML-oLD4l6bxm84k6DNHA,252 +diagrams/generic/virtualization.py,sha256=8yfIdYtJW8ueiteb_W7F-mA_aY7v_uEBrFzFMMe-juM,450 +diagrams/gis/__init__.py,sha256=9gGo1qURxAcH7f0CXL6Uf3lO6UHiNRE6UtkFYxVyDJg,212 +diagrams/gis/__pycache__/__init__.cpython-312.pyc,, +diagrams/gis/__pycache__/cli.cpython-312.pyc,, +diagrams/gis/__pycache__/cplusplus.cpython-312.pyc,, +diagrams/gis/__pycache__/data.cpython-312.pyc,, +diagrams/gis/__pycache__/database.cpython-312.pyc,, +diagrams/gis/__pycache__/desktop.cpython-312.pyc,, +diagrams/gis/__pycache__/format.cpython-312.pyc,, +diagrams/gis/__pycache__/geocoding.cpython-312.pyc,, +diagrams/gis/__pycache__/georchestra.cpython-312.pyc,, +diagrams/gis/__pycache__/java.cpython-312.pyc,, +diagrams/gis/__pycache__/javascript.cpython-312.pyc,, +diagrams/gis/__pycache__/mobile.cpython-312.pyc,, +diagrams/gis/__pycache__/ogc.cpython-312.pyc,, +diagrams/gis/__pycache__/organization.cpython-312.pyc,, +diagrams/gis/__pycache__/python.cpython-312.pyc,, +diagrams/gis/__pycache__/routing.cpython-312.pyc,, +diagrams/gis/__pycache__/server.cpython-312.pyc,, +diagrams/gis/__pycache__/toolkit.cpython-312.pyc,, +diagrams/gis/cli.py,sha256=ISezN-Al7uC_zfZc320v8zPK2L0TOSDFso83hTNkjNE,449 +diagrams/gis/cplusplus.py,sha256=REuGpfNSgnbuU9PuSQTmRCGp2tUtQYhVumtQfUAIXCw,246 +diagrams/gis/data.py,sha256=cgCE1eKA7qMvwJgUWNqSsrX1ClM2xQgXyBuxJlZs_gI,368 +diagrams/gis/database.py,sha256=4gmOvjNdpack_wTb-6yRXMFIfX9qtoEQ1xyo5SSXn-U,244 +diagrams/gis/desktop.py,sha256=2oR_9HCxEtGUMaeQypWK7dqpYm7Z3NmlqnyYhRgkOsM,289 
+diagrams/gis/format.py,sha256=uSrj2x8wrgaZtOfz5y8780dKr1wahQ6WbLjurD3aY_M,300 +diagrams/gis/geocoding.py,sha256=ELB6O5-yvSIZAT0zc9nSzFG3u0oIbeE7hxI15jxp8HI,415 +diagrams/gis/georchestra.py,sha256=TkifIXTTj5z1J_Pz_dd2-32nVfOJ9iufuxQu8bcWifQ,199 +diagrams/gis/java.py,sha256=mEx4jugBh_gdurM5GmmULxraCM-H8ENOPrUGli3i4m8,230 +diagrams/gis/javascript.py,sha256=7QesM6z3p5AZlplLmqYQCuoA1LQr-LZXFQGmUNdCI9k,651 +diagrams/gis/mobile.py,sha256=hR0v7gypeayej406mR2I5TmGcIQkp1dL_EB0hcS_KX4,332 +diagrams/gis/ogc.py,sha256=jnlb2ehOrnvsOTVlirx6Zmy7JPjS-W9ujAYbaMEJVbc,298 +diagrams/gis/organization.py,sha256=z4F4eAxc5mwqIHh87mMRVw9BpDM2FozYriLfdZTISOA,256 +diagrams/gis/python.py,sha256=qkMHZHpZ8OTIy69_2IPb5qnBl-smHK8Fg88yeLuwb8M,288 +diagrams/gis/routing.py,sha256=R8tJ7tiyw2AwCe9C9eh26hSyB8fXrLt1pX1kCREEl9E,407 +diagrams/gis/server.py,sha256=NkqWypNZNdfxWnCkMEo1l7J9kzvajTb8Kl8H_1gqU-Y,1404 +diagrams/gis/toolkit.py,sha256=jmNaT82SUoWjN9deXWWXzQajuC_Hwsai-zehbVriq0g,187 +diagrams/ibm/__init__.py,sha256=IdT6IJWBLEnccGpxIIKDP0AqbdZbxMBGWtphKySVLvw,232 +diagrams/ibm/__pycache__/__init__.cpython-312.pyc,, +diagrams/ibm/__pycache__/analytics.cpython-312.pyc,, +diagrams/ibm/__pycache__/applications.cpython-312.pyc,, +diagrams/ibm/__pycache__/blockchain.cpython-312.pyc,, +diagrams/ibm/__pycache__/compute.cpython-312.pyc,, +diagrams/ibm/__pycache__/data.cpython-312.pyc,, +diagrams/ibm/__pycache__/devops.cpython-312.pyc,, +diagrams/ibm/__pycache__/general.cpython-312.pyc,, +diagrams/ibm/__pycache__/infrastructure.cpython-312.pyc,, +diagrams/ibm/__pycache__/management.cpython-312.pyc,, +diagrams/ibm/__pycache__/network.cpython-312.pyc,, +diagrams/ibm/__pycache__/security.cpython-312.pyc,, +diagrams/ibm/__pycache__/social.cpython-312.pyc,, +diagrams/ibm/__pycache__/storage.cpython-312.pyc,, +diagrams/ibm/__pycache__/user.cpython-312.pyc,, +diagrams/ibm/analytics.py,sha256=aPNlkeeODc0XrPxLdShWFNVF17iNJ1HQWlEKUAbssSM,548 +diagrams/ibm/applications.py,sha256=kgEr8oag7KHOVJ33FaQhMLpN_jhwRDIlfUvOdjPa1hg,1572 +diagrams/ibm/blockchain.py,sha256=EZ-syung3wMJoE0pmizveEX1T0L7pShyKZzcGJLr9kU,1580 +diagrams/ibm/compute.py,sha256=-jOiTQ4vHhtSLTx68Q0hdK_0xI3EwxBYrytajCNdQ9A,488 +diagrams/ibm/data.py,sha256=CrR2qenw7MtsXaYgyS6QODUpVZH3GvBivpZ_TgiajxM,1018 +diagrams/ibm/devops.py,sha256=vukkdzMwnH8lBXzESVu-pv6YvobrwaMZ0m-N3iv6NVg,870 +diagrams/ibm/general.py,sha256=O4SrQh9yEo9NVUSfRQrr7giiMIiYMemKKiYdLqsNmrY,2082 +diagrams/ibm/infrastructure.py,sha256=GbR4OmN4maqQnJ7KfMacseAIwdIQ2AC7tVyO0ccHki8,1641 +diagrams/ibm/management.py,sha256=PZzfZjYEBgtbLp2mWpbhOKnt2G7_UoEYd_0Gmc22QOw,1356 +diagrams/ibm/network.py,sha256=EC-8f-CaKuTKIcvj36fxiG_J5JLZydHGv-oNpBF1Pg8,1461 +diagrams/ibm/security.py,sha256=5wZ8g1z0te_IxsGVM6coTfktDSqBTe8nzQK6H1_wwRI,1227 +diagrams/ibm/social.py,sha256=6FoXaoLp-JT7M9K7SSXuX_keE1l2K4CzZAjAdcgdQWw,486 +diagrams/ibm/storage.py,sha256=CBu-GC9CRZy38IKNju6iJHH5AQbVh-HDXkhARJNqzq8,317 +diagrams/ibm/user.py,sha256=Sr2YvHf7dY3mX8HyjpCMKfl0q1B5g5mwAZI2Ot8UUrQ,527 +diagrams/k8s/__init__.py,sha256=tOXrHQkMvclNVXC6QPMlaQykza0UK85Z2Kv0QqzUS-M,224 +diagrams/k8s/__pycache__/__init__.cpython-312.pyc,, +diagrams/k8s/__pycache__/chaos.cpython-312.pyc,, +diagrams/k8s/__pycache__/clusterconfig.cpython-312.pyc,, +diagrams/k8s/__pycache__/compute.cpython-312.pyc,, +diagrams/k8s/__pycache__/controlplane.cpython-312.pyc,, +diagrams/k8s/__pycache__/ecosystem.cpython-312.pyc,, +diagrams/k8s/__pycache__/group.cpython-312.pyc,, +diagrams/k8s/__pycache__/infra.cpython-312.pyc,, +diagrams/k8s/__pycache__/network.cpython-312.pyc,, 
+diagrams/k8s/__pycache__/others.cpython-312.pyc,, +diagrams/k8s/__pycache__/podconfig.cpython-312.pyc,, +diagrams/k8s/__pycache__/rbac.cpython-312.pyc,, +diagrams/k8s/__pycache__/storage.cpython-312.pyc,, +diagrams/k8s/chaos.py,sha256=UCPyPj2pUVw4wrkpfyORjDowwCc2sNiV-x30gGFhZWM,297 +diagrams/k8s/clusterconfig.py,sha256=r0xgmCkW1Vz958A73A2qKHEdZ-9zxWEAefYneky2acE,419 +diagrams/k8s/compute.py,sha256=AZoIYpPGR63lJkpqplU7OxXzHRd_uWzcGucAoeiIrYY,582 +diagrams/k8s/controlplane.py,sha256=CT4b0CR1K-7eL4jLh8RQjhCFa92i8eahm1zCl5fqUXc,599 +diagrams/k8s/ecosystem.py,sha256=RYpZa9mJ0SOFSCK1dQkFSSrEbmFY7qC4mDWZJ0Fgn1I,414 +diagrams/k8s/group.py,sha256=WElkxBO7lFF0zb8KA-XIT6KxLerF09QC7VfnPDM4yL4,238 +diagrams/k8s/infra.py,sha256=amFm4bvmc1tuZl-3ZXpHoRV_XJitlKkVyZiObfnRJNE,320 +diagrams/k8s/network.py,sha256=SAvmLdHyUZt83_Qc0-UH6ktFwhzUZQkT6y5byQgre5Y,437 +diagrams/k8s/others.py,sha256=1Q1Mt9yxNoECJY32uTCRPVmTMFqKPFdPXWkYXGqL8q0,272 +diagrams/k8s/podconfig.py,sha256=2iwRq83eSJJaxDGUwPwN0zW0yNFSONlDfEdBrLu6API,307 +diagrams/k8s/rbac.py,sha256=UmpWvSYSGyHPSR8tsHa_BeVObAuig8DTa-IZ68V2OKw,564 +diagrams/k8s/storage.py,sha256=pucvWzAfic6I0xe_tc3i2jSi_jAsVSraM49pOab7KCU,445 +diagrams/oci/__init__.py,sha256=Smo-X41BEecFokVGrfHYHzflYoZ7DfKB1AGxfaKT4xw,250 +diagrams/oci/__pycache__/__init__.cpython-312.pyc,, +diagrams/oci/__pycache__/compute.cpython-312.pyc,, +diagrams/oci/__pycache__/connectivity.cpython-312.pyc,, +diagrams/oci/__pycache__/database.cpython-312.pyc,, +diagrams/oci/__pycache__/devops.cpython-312.pyc,, +diagrams/oci/__pycache__/governance.cpython-312.pyc,, +diagrams/oci/__pycache__/monitoring.cpython-312.pyc,, +diagrams/oci/__pycache__/network.cpython-312.pyc,, +diagrams/oci/__pycache__/security.cpython-312.pyc,, +diagrams/oci/__pycache__/storage.cpython-312.pyc,, +diagrams/oci/compute.py,sha256=D_222z3xMnHi5B_S0yhEUp6J4t22Oqu92lFQh2BC-IQ,1298 +diagrams/oci/connectivity.py,sha256=SamqL3IjDWbJoT6XpoTFES8xjZzPtCz3VNQ2KTZzVsM,1463 +diagrams/oci/database.py,sha256=DsDaDLeKCdHgqu1W7CF_BllmdxGs7jn3Gsvcj7f6YWg,1430 +diagrams/oci/devops.py,sha256=NZxu_t7oaRSw1mIsgRpG6uCcC0c79iTA6aC2yb1wqRw,579 +diagrams/oci/governance.py,sha256=-tZlH0RG4ujQRwTO7K3ycvzH5gToUaPFhsFQseuvPZs,1057 +diagrams/oci/monitoring.py,sha256=lvYeLtFIfy_9NYjMgcQATWjAM2k4MKXFeGQojwl2hKQ,1325 +diagrams/oci/network.py,sha256=B28riYl2HpSIfdZnJUrtV4S_0PWRos-2HQQvWWNs6OM,1221 +diagrams/oci/security.py,sha256=79gDqq64G4T2vmVskrCKr38PeXAyJcPGa393tZBNxFI,1200 +diagrams/oci/storage.py,sha256=JSNEKl0t8MBmYKj95dtGNvyxOFOdACPMs1eaviHLV5s,1474 +diagrams/onprem/__init__.py,sha256=UQ0_NuaE1zQ0pqXh-b36JMY-8eNrPwHMEDpiYgWluhs,250 +diagrams/onprem/__pycache__/__init__.cpython-312.pyc,, +diagrams/onprem/__pycache__/aggregator.cpython-312.pyc,, +diagrams/onprem/__pycache__/analytics.cpython-312.pyc,, +diagrams/onprem/__pycache__/auth.cpython-312.pyc,, +diagrams/onprem/__pycache__/cd.cpython-312.pyc,, +diagrams/onprem/__pycache__/certificates.cpython-312.pyc,, +diagrams/onprem/__pycache__/ci.cpython-312.pyc,, +diagrams/onprem/__pycache__/client.cpython-312.pyc,, +diagrams/onprem/__pycache__/compute.cpython-312.pyc,, +diagrams/onprem/__pycache__/container.cpython-312.pyc,, +diagrams/onprem/__pycache__/database.cpython-312.pyc,, +diagrams/onprem/__pycache__/dns.cpython-312.pyc,, +diagrams/onprem/__pycache__/etl.cpython-312.pyc,, +diagrams/onprem/__pycache__/gitops.cpython-312.pyc,, +diagrams/onprem/__pycache__/groupware.cpython-312.pyc,, +diagrams/onprem/__pycache__/iac.cpython-312.pyc,, +diagrams/onprem/__pycache__/identity.cpython-312.pyc,, 
+diagrams/onprem/__pycache__/inmemory.cpython-312.pyc,, +diagrams/onprem/__pycache__/logging.cpython-312.pyc,, +diagrams/onprem/__pycache__/messaging.cpython-312.pyc,, +diagrams/onprem/__pycache__/mlops.cpython-312.pyc,, +diagrams/onprem/__pycache__/monitoring.cpython-312.pyc,, +diagrams/onprem/__pycache__/network.cpython-312.pyc,, +diagrams/onprem/__pycache__/proxmox.cpython-312.pyc,, +diagrams/onprem/__pycache__/queue.cpython-312.pyc,, +diagrams/onprem/__pycache__/registry.cpython-312.pyc,, +diagrams/onprem/__pycache__/search.cpython-312.pyc,, +diagrams/onprem/__pycache__/security.cpython-312.pyc,, +diagrams/onprem/__pycache__/storage.cpython-312.pyc,, +diagrams/onprem/__pycache__/tracing.cpython-312.pyc,, +diagrams/onprem/__pycache__/vcs.cpython-312.pyc,, +diagrams/onprem/__pycache__/workflow.cpython-312.pyc,, +diagrams/onprem/aggregator.py,sha256=IEzlNhGMXj8lfHmq12AYIyaYCZSJRqlzVZ4GaNZNqY0,315 +diagrams/onprem/analytics.py,sha256=zWGJ4wpK1vhGTUeL1PXAxXR3WhsiBLGqba602VyOKsw,1122 +diagrams/onprem/auth.py,sha256=Nc8_iON6FBImiIsFfrNutAxRnyR-0RMLNB84I-sKaxc,357 +diagrams/onprem/cd.py,sha256=538OXVYOTuR-6GzRX2uSsltqrcfroqGTgqTchKIAcaU,332 +diagrams/onprem/certificates.py,sha256=llC0QMFQ8jORTS4WxtGZnnxbRZYRiECao8agt5w7IgM,345 +diagrams/onprem/ci.py,sha256=p-cKfcGfOygL2tCr2EQXpmN85NOPIG_xwzyDBMvIURk,775 +diagrams/onprem/client.py,sha256=7NX52R9-ylHNVVyhBYZ6Iu0bMz-IzbHUR26Ir8eBwgU,337 +diagrams/onprem/compute.py,sha256=-2YqtTN3Ivwbx9_NYWn-SNdjAa4wp7nLiUc1iAYXDHo,296 +diagrams/onprem/container.py,sha256=Mjj8rcavSLPW9obMgdkAAEkzUfkv1uWO0Rrl7KVm9pI,643 +diagrams/onprem/database.py,sha256=a-tIkFeikAbxPrMfA2QfoiKUicVQUU0SwI8RNZPir5g,1396 +diagrams/onprem/dns.py,sha256=E75P8C1kAzDuOzw1OserQYgZgvl5ZuGZ9Qfv6kXjUzk,284 +diagrams/onprem/etl.py,sha256=3bkt1mwrFgw_Jwwy3xgfn_Ocz7k06z8GMJnDaHwEMwE,231 +diagrams/onprem/gitops.py,sha256=QfhhIwhWnrNpg-_MJEDXOsOY_qqekH5xIGE6pJgCPQs,358 +diagrams/onprem/groupware.py,sha256=MWCdZOVAGFE5K3tmRcD4JjVMb_se2FiUHBxroMsXBSc,261 +diagrams/onprem/iac.py,sha256=eBdUZrkhl_2Im2OCe78NvmuuvQw6mkwXKxiVQ3oHf1g,472 +diagrams/onprem/identity.py,sha256=qUv_WyVSn6jM-puHNhUVIyf-XFp_tDF6goAmUz1Qjtw,245 +diagrams/onprem/inmemory.py,sha256=NaLhflI4IU-7YP3fyuRBSNP7WUjiYdiWjIuNCHVFITs,423 +diagrams/onprem/logging.py,sha256=Htt6D_Azse2xaCxb-2LjRZZp6DyrV48QGudG0tJ-_-c,503 +diagrams/onprem/messaging.py,sha256=3tztZVOT4uDZmU33LdVio3h9HfVSIwa2stRZk_VOwFU,263 +diagrams/onprem/mlops.py,sha256=YTzmJtveXfkoiK5TRgkubmXeTu5FPtwnOYW0y2_kEX0,292 +diagrams/onprem/monitoring.py,sha256=4trktxt1X1S_xaKpJGZOImU-x2zalQEso_PFTxO50Ms,1004 +diagrams/onprem/network.py,sha256=PLb3loElKRjAaqaGmSEgxBz8Re5LuU93YNkIn5lwUEM,1923 +diagrams/onprem/proxmox.py,sha256=XUuut8LJaJGRVG1KcnObIivmx8iTkymwt0tSAR3OZow,258 +diagrams/onprem/queue.py,sha256=hQX1mz02KaIQWvMsiI0nNc5kQ15Kb77PFloB9Ebdbi0,600 +diagrams/onprem/registry.py,sha256=l1n3l_vsycoKz-ERzz_yOUe3w_NOjjxi4yWdqC1VmLE,301 +diagrams/onprem/search.py,sha256=VE7UdInpZPD7gBtFdNHqBBKbNBPz2_YSTnrMqbKEpeo,239 +diagrams/onprem/security.py,sha256=w_iKn3yp8Cy__ey7rkpKFVsmMMYR0WvwpRUcz8wFYWE,357 +diagrams/onprem/storage.py,sha256=HqeIfwez3vZ55Y_CGU8SA9eXWtMpdGXLmKd3i9EuKos,441 +diagrams/onprem/tracing.py,sha256=bdj4Vwgb9_7vVyBxGAU9xsU3-6ajl84zyH-UioDCWyQ,296 +diagrams/onprem/vcs.py,sha256=ydMWTBOWmzMHi5tO74EeIlQ1bZAgd95a-qj1vmDQQpM,405 +diagrams/onprem/workflow.py,sha256=9683rqtx-beszcn92ds60ATI1l4aEyZnCQbStRZcHHM,442 +diagrams/openstack/__init__.py,sha256=_444CLWHJBDzufJOrxmHQuipIftHzNcAOOm9DUTf0nA,269 +diagrams/openstack/__pycache__/__init__.cpython-312.pyc,, 
+diagrams/openstack/__pycache__/adjacentenablers.cpython-312.pyc,, +diagrams/openstack/__pycache__/apiproxies.cpython-312.pyc,, +diagrams/openstack/__pycache__/applicationlifecycle.cpython-312.pyc,, +diagrams/openstack/__pycache__/baremetal.cpython-312.pyc,, +diagrams/openstack/__pycache__/billing.cpython-312.pyc,, +diagrams/openstack/__pycache__/compute.cpython-312.pyc,, +diagrams/openstack/__pycache__/containerservices.cpython-312.pyc,, +diagrams/openstack/__pycache__/deployment.cpython-312.pyc,, +diagrams/openstack/__pycache__/frontend.cpython-312.pyc,, +diagrams/openstack/__pycache__/lifecyclemanagement.cpython-312.pyc,, +diagrams/openstack/__pycache__/monitoring.cpython-312.pyc,, +diagrams/openstack/__pycache__/multiregion.cpython-312.pyc,, +diagrams/openstack/__pycache__/networking.cpython-312.pyc,, +diagrams/openstack/__pycache__/nfv.cpython-312.pyc,, +diagrams/openstack/__pycache__/operations.cpython-312.pyc,, +diagrams/openstack/__pycache__/optimization.cpython-312.pyc,, +diagrams/openstack/__pycache__/orchestration.cpython-312.pyc,, +diagrams/openstack/__pycache__/packaging.cpython-312.pyc,, +diagrams/openstack/__pycache__/sharedservices.cpython-312.pyc,, +diagrams/openstack/__pycache__/storage.cpython-312.pyc,, +diagrams/openstack/__pycache__/user.cpython-312.pyc,, +diagrams/openstack/__pycache__/workloadprovisioning.cpython-312.pyc,, +diagrams/openstack/adjacentenablers.py,sha256=EvQPzFxNjOoEXzz6vCilhW27h_3bOFInLOetn6O5WyE,232 +diagrams/openstack/apiproxies.py,sha256=SiVjumS3JxandjrI50Y160qGcR11nAgn03XFWhxyLpo,268 +diagrams/openstack/applicationlifecycle.py,sha256=4pTyUBg7GG7rJMQxZxYFE1iHcIt_UVf8jDguJ7m5U5E,504 +diagrams/openstack/baremetal.py,sha256=1WWp1TE_iUcGDaPpCIZuvzDU2VPNa4jf6JRooJjf2kU,317 +diagrams/openstack/billing.py,sha256=TD1uMFzgED5A4TZbLLhyMK-GliYdzrXhPkJ4X-YFhww,289 +diagrams/openstack/compute.py,sha256=0SvcyspR1w8vpCeYEdGFLZPwFO70wDMFTidn5diJuCY,350 +diagrams/openstack/containerservices.py,sha256=YqrDwanGORKMTEE975H5bGzSIwGga8fdfUjzcpyyh_c,294 +diagrams/openstack/deployment.py,sha256=uLrxzfRHqu9guZmfmKKCJg8bbGIykwXEqXC9eX5-YIY,572 +diagrams/openstack/frontend.py,sha256=2LwireIB2vncsl_xSfrKAGNvKBzG7UuV2OlHjaFLT68,262 +diagrams/openstack/lifecyclemanagement.py,sha256=1CJC-G-5T3DglBcZEM05IEWbJHSQEetdCHg0b5DWWzk,241 +diagrams/openstack/monitoring.py,sha256=wxk85FfwqtL0cYaSBOpYIuYRLYqHenhx1LQJVXMaTIE,330 +diagrams/openstack/multiregion.py,sha256=UEr6YvEFgTdueBnZX5_EGXWKlAHz_PcxtgIXTcrxZww,278 +diagrams/openstack/networking.py,sha256=L5rlDU_BIUD7cp_K0SY2gMo6XrBmfKXfDXnEci833S4,386 +diagrams/openstack/nfv.py,sha256=ryVOsisR0n-QKRsWXjJIVo-NODUjkbuHHbNmtfP8nZM,240 +diagrams/openstack/operations.py,sha256=qf5XepwA8g0N_png40s8hh8B5qGc8As3qvyOP93P4ak,214 +diagrams/openstack/optimization.py,sha256=zYxNNrE7l7lvB84zbwAKoCznVWETJBxRcYLSrMn98FA,450 +diagrams/openstack/orchestration.py,sha256=zUHQZr9uh5hKHxFYXHEYu0NPk-1MeYNk7Xzwcy-cuxM,504 +diagrams/openstack/packaging.py,sha256=F58W3q0v1LtyJRgKcXJHCQbZfVFT7gIsqleJIJZ0ETw,360 +diagrams/openstack/sharedservices.py,sha256=cntt79FiprfvAcytDjBQW4R0NQjRUwsZ6rla7OT4jlk,534 +diagrams/openstack/storage.py,sha256=raMaUrOiwBjztcIijlZ7XyOcBwOP63aYd3sg5hwPGAk,356 +diagrams/openstack/user.py,sha256=rpvW-bq-ZND4RyPZvTGGbRLeZ2zBlfdJfmbP7mrJre4,297 +diagrams/openstack/workloadprovisioning.py,sha256=aoJnH4CHFz5zjbzNRlq0dtPjydkYhM0cJ48dEkiGWlo,434 +diagrams/outscale/__init__.py,sha256=pQdWJXPKk3Zrkz18lAvR_PGAOjqdbnGa_uOXXERmr7U,198 +diagrams/outscale/__pycache__/__init__.cpython-312.pyc,, +diagrams/outscale/__pycache__/compute.cpython-312.pyc,, 
+diagrams/outscale/__pycache__/network.cpython-312.pyc,, +diagrams/outscale/__pycache__/security.cpython-312.pyc,, +diagrams/outscale/__pycache__/storage.cpython-312.pyc,, +diagrams/outscale/compute.py,sha256=_e88k201KtAVArhXTLRRIoSJjqGJwKqM38YZm_uLHjE,321 +diagrams/outscale/network.py,sha256=Y0l4bf1rMRNpTWj-KlvrRuBNSCMhtO0aGRwBehUTyNo,569 +diagrams/outscale/security.py,sha256=AgrRweSpbZZ9DvB9Os9CcrWFkrL2GjtnIgGiQIymmiM,358 +diagrams/outscale/storage.py,sha256=AtwP8KvtX16R9Qb0k0dBJ07micfIZasWmYqpDtZKjdA,336 +diagrams/programming/__init__.py,sha256=7wZTP5Zcj1Y-NlFtBi-sbikKT0i_o-fbrUjF_eMdhlE,293 +diagrams/programming/__pycache__/__init__.cpython-312.pyc,, +diagrams/programming/__pycache__/flowchart.cpython-312.pyc,, +diagrams/programming/__pycache__/framework.cpython-312.pyc,, +diagrams/programming/__pycache__/language.cpython-312.pyc,, +diagrams/programming/__pycache__/runtime.cpython-312.pyc,, +diagrams/programming/flowchart.py,sha256=4zjihMEr8rDyffu-XMrbl6GsGrGVjBWlXZ-EX8-NRs0,1707 +diagrams/programming/framework.py,sha256=F6k2ZDXMcd_kILL0Z10kKhGW-pa4mBHzIpO0cE9PZSw,1637 +diagrams/programming/language.py,sha256=y4AV6dIn6ynyVY0FNfpqv8q1HoRWWE-GGndOwQNWXFU,1429 +diagrams/programming/runtime.py,sha256=i6fB7YhMByIVx9lwH8J_5totEQIsBNcTBdkGkvPgRXA,258 +diagrams/saas/__init__.py,sha256=bI8ymP3p6SaW67NruIQ190SRjYmE-u-KytPk3AT7nqc,229 +diagrams/saas/__pycache__/__init__.cpython-312.pyc,, +diagrams/saas/__pycache__/alerting.cpython-312.pyc,, +diagrams/saas/__pycache__/analytics.cpython-312.pyc,, +diagrams/saas/__pycache__/automation.cpython-312.pyc,, +diagrams/saas/__pycache__/cdn.cpython-312.pyc,, +diagrams/saas/__pycache__/chat.cpython-312.pyc,, +diagrams/saas/__pycache__/communication.cpython-312.pyc,, +diagrams/saas/__pycache__/crm.cpython-312.pyc,, +diagrams/saas/__pycache__/filesharing.cpython-312.pyc,, +diagrams/saas/__pycache__/identity.cpython-312.pyc,, +diagrams/saas/__pycache__/logging.cpython-312.pyc,, +diagrams/saas/__pycache__/media.cpython-312.pyc,, +diagrams/saas/__pycache__/recommendation.cpython-312.pyc,, +diagrams/saas/__pycache__/security.cpython-312.pyc,, +diagrams/saas/__pycache__/social.cpython-312.pyc,, +diagrams/saas/alerting.py,sha256=xMsCsBbpACR8-sec2bjp25eHnqA1-miw3eTOen8wkD4,475 +diagrams/saas/analytics.py,sha256=vFEq1ilwqrkG8tdms1_vbROZEpGnmXfe_U5k_Gfk-1k,365 +diagrams/saas/automation.py,sha256=dtGsXq8gpsidiW_3W8gCdA-QGOfQORImN4IqoaWf_E0,247 +diagrams/saas/cdn.py,sha256=qqOVqWum2WyqCEGFhdKsGDY-Voz-9RX3tji-ua0OV6Y,327 +diagrams/saas/chat.py,sha256=GlMa5F5hI3q6LjwMO0OBTwhpoy0841Y09sO-dkokMYQ,586 +diagrams/saas/communication.py,sha256=UOIUlrclC0dLTTsiIbU8zNqWBDtlN0hx53sn93Jw454,265 +diagrams/saas/crm.py,sha256=iuBNPveq9NOGjJpJptqHl9iIRpwfDU0mP5CvmAIRO64,278 +diagrams/saas/filesharing.py,sha256=k9JDZXGnK2CVHuMYzKBCRPipJ52LckBwOBrxTst5ubE,263 +diagrams/saas/identity.py,sha256=EpVgHcuIQoy4A2hOfebF3_yWEq13fkdWz8HcPZZw3EQ,291 +diagrams/saas/logging.py,sha256=E-RNDubbBep75B5lTLrSTnm8L_UwEs1XDebZVa9kjt8,396 +diagrams/saas/media.py,sha256=9H5mHd6DavmTPyEAQ-KOLkQPrX9oFxlGLsS1dwEa0do,241 +diagrams/saas/recommendation.py,sha256=4vk1xioxPIi9-KiBdpjIvzvcNY5s2yJAl0Ol-DzT2vs,273 +diagrams/saas/security.py,sha256=EGkEAhQ8nO85bQsHUVgoxP6_ifda8HpXqGPLqFKVVWE,313 +diagrams/saas/social.py,sha256=CTyccPJ0U9_IkC_CWdK4uwyoFrkMhIg1mk8bl99g8tM,293 +resources/alibabacloud/alibabacloud.png,sha256=m5QqWWd_U91qVj_6tc3WpmvRk_HOZi-_A3LPGEbgr5k,8452 +resources/alibabacloud/analytics/analytic-db.png,sha256=3pcju1WWXt3BelnZMdrOcXhqp1-BV47FEpjSZSxa1mw,13210 
+resources/alibabacloud/analytics/click-house.png,sha256=LEXTiA8LQXCwf9rLrr5JwYs6Vnv7Q5oFuOYw7-wpoF4,1540 +resources/alibabacloud/analytics/data-lake-analytics.png,sha256=BRdacNzzWKpHpO65HkgLBXWfe5O4gBuwBraSFPPt4g4,34815 +resources/alibabacloud/analytics/elatic-map-reduce.png,sha256=Xfxrj4Tqm1olRZaiF2WQaqBvad9Pj_gX4FodSsH94d8,15867 +resources/alibabacloud/analytics/open-search.png,sha256=N20mEHiD0Bru-8Hus-BOz0l_pZ1PDtSjx8TmJyx6MtQ,17579 +resources/alibabacloud/application/api-gateway.png,sha256=mF1OspCz1s9FP2avsHo35oShVpY361ZU2zxR0jYtm2c,7820 +resources/alibabacloud/application/bee-bot.png,sha256=Pehx90Ci365LsgK7vE0vW4THjJN8qqcuns32PcNy9sc,13444 +resources/alibabacloud/application/blockchain-as-a-service.png,sha256=laP04iCoJgrvQkWz2xRguBVloq_9km8VzaQUEr4kogk,9261 +resources/alibabacloud/application/cloud-call-center.png,sha256=BMFTAVkKj2iZsWlE6JbTC9mekEmz2Xfe7I4GaJv0BYU,11978 +resources/alibabacloud/application/code-pipeline.png,sha256=u4zZadt2FcpoJIX_glVGfUmWvJ7lqwaeNikycfI1dhk,10405 +resources/alibabacloud/application/direct-mail.png,sha256=Vr8ruKeVUW0Q7pN7u9cvt6PgOqGkxj2aMQLKIJDFjHo,9246 +resources/alibabacloud/application/log-service.png,sha256=fImdrS77pvdmEy0YioMIFHPj-9K7uNsnqwT3RvhPZAc,9719 +resources/alibabacloud/application/message-notification-service.png,sha256=cl4DYt_qGF8HLmX7h0Tv5P1hy5dQkYrbV7_cuQN_gzg,4380 +resources/alibabacloud/application/node-js-performance-platform.png,sha256=XPsxpZxEPlW0KcAZD__Ptq3_UQN0hm-6tAV52m9kAmE,9593 +resources/alibabacloud/application/open-search.png,sha256=N20mEHiD0Bru-8Hus-BOz0l_pZ1PDtSjx8TmJyx6MtQ,17579 +resources/alibabacloud/application/performance-testing-service.png,sha256=chYePhpJYFx0PDag2cF6u9FT7KwGUetHaosGjnxOmuU,7130 +resources/alibabacloud/application/rd-cloud.png,sha256=KP5G2fxVv1LPsZPgLfJlyKmN0bX4rC0WRk30ilr6Ibg,12132 +resources/alibabacloud/application/smart-conversation-analysis.png,sha256=nE_egD6dVqMwmVPVizweDRADPoifyx1Enw58Hlzb-t4,10074 +resources/alibabacloud/application/yida.png,sha256=HNB1ZmIEGZsVuag_ymLKjc3tg2-lu0_H_B7GzjiAx8w,5343 +resources/alibabacloud/communication/direct-mail.png,sha256=Vr8ruKeVUW0Q7pN7u9cvt6PgOqGkxj2aMQLKIJDFjHo,9246 +resources/alibabacloud/communication/mobile-push.png,sha256=QJNaYDb8wXvZA2EE0cAzZpxFzhjgKLLZFxxWb8r2HQ4,10068 +resources/alibabacloud/compute/auto-scaling.png,sha256=rXjH_pASjn0e9HsYTZBstsMXsiUh0D_hYjzFIQyUWec,14937 +resources/alibabacloud/compute/batch-compute.png,sha256=uCtetlp4rG8ga6MeSP7eU9K2aetUx8YukOtPfHgx0M0,10420 +resources/alibabacloud/compute/container-registry.png,sha256=QqaoHVcnIsvNOo_2cWHzBTrFNSCIyEVv_1Hb98igjPs,12396 +resources/alibabacloud/compute/container-service.png,sha256=Ca2YwcXBMxOxOdQR9RseruyRysUQYl11OibFem2BKug,20166 +resources/alibabacloud/compute/elastic-compute-service.png,sha256=MiV3BtnTlZTEzwGTJ3kGXKDwXDz9z8Z6ahJQNQZ6UXI,8822 +resources/alibabacloud/compute/elastic-container-instance.png,sha256=C1-17KbtaGpRl70kGJSh2I0URJbRTJw3CtfbTgTZekg,20076 +resources/alibabacloud/compute/elastic-high-performance-computing.png,sha256=vdwQwcukyWqWAmC07-lIGMtAQALK_vn07atuG--m87o,16019 +resources/alibabacloud/compute/elastic-search.png,sha256=XJ9AZTw2ioAqRQp-wFHan3HcXPynq4rYLB47i7iNHLc,11425 +resources/alibabacloud/compute/function-compute.png,sha256=JIA5Rf5_DzzdZcLZLO9YFpj5xE892yPkxyIHd0wvf8o,8925 +resources/alibabacloud/compute/operation-orchestration-service.png,sha256=5je8wVNmYNa4kanQSoO3oMYMYnDSG-ycaf1L0hJTeTM,10248 +resources/alibabacloud/compute/resource-orchestration-service.png,sha256=8qn2WHwwJdH_EG53d9fJoMDyowvl6woYpc2rmOtdPfg,11770 
+resources/alibabacloud/compute/server-load-balancer.png,sha256=ONH0i0GcdXzsKdtJKKRZhr6YbpKB0jIStzojdpkobKA,11425 +resources/alibabacloud/compute/serverless-app-engine.png,sha256=xDABZPxGBpMRsSTjjY9udCnPzuFf3VnnKFt2UctRlC8,17673 +resources/alibabacloud/compute/simple-application-server.png,sha256=gAQZFobXNolLUYexm-o9piZvHM_-zirBXszL0qJF2Yk,20334 +resources/alibabacloud/compute/web-app-service.png,sha256=mR2M_f8ES_Taoc7th6q8gxKRGGmp2f1SqQGMM11dy9g,16860 +resources/alibabacloud/database/apsaradb-cassandra.png,sha256=rh_MhY917EK4tWzQOuiE8UeWPP1FMmjlcC_Y2TT6VSw,10983 +resources/alibabacloud/database/apsaradb-hbase.png,sha256=QTsBqy-T2p5Ynis8Pt4jdSC0sKew4RCrAS5N6wm7kfY,13238 +resources/alibabacloud/database/apsaradb-memcache.png,sha256=HoOFAf6-PsyUysKuRiZx2hqyr3ua9E2GlafmUOp_XSo,9210 +resources/alibabacloud/database/apsaradb-mongodb.png,sha256=M21rkj9-fxDgzvyiH1lrSAxH_VaI-WmHiEfvn7AOd_o,14423 +resources/alibabacloud/database/apsaradb-oceanbase.png,sha256=CKv0O0_z2gNqqW_LoCz1noWKnaekp38CKD6dNjXxnf4,18498 +resources/alibabacloud/database/apsaradb-polardb.png,sha256=aTBsLjqhIMt8jsBjaYiE_fLPnZoumF5CiRpzRPnb8FY,11872 +resources/alibabacloud/database/apsaradb-postgresql.png,sha256=MWEm87U8BvE93Jsrsd0QpE09b6QcVYzSMRzTeWTy0Iw,10340 +resources/alibabacloud/database/apsaradb-ppas.png,sha256=lyxGtUWK-PbI-EgrZAwuySmOOC8yXpwxbTQuUA7pmV8,14331 +resources/alibabacloud/database/apsaradb-redis.png,sha256=5kJKcxJNnfCrPdmj00BpdPXrs25BH56kNdc-Ec2jfn4,17949 +resources/alibabacloud/database/apsaradb-sqlserver.png,sha256=7uc8yPRfRlrKx9BcBFW2gWegettfKUBuGcR1bFaQXPY,21798 +resources/alibabacloud/database/data-management-service.png,sha256=WOAS8oazlwvXlI7-XahIVTNq3aFV8ASgnUSHM0Vvdzk,21824 +resources/alibabacloud/database/data-transmission-service.png,sha256=y0EUKNuXFJmyZYcIn3YwEQA_74qUmV_S9t6trrNsrfk,12086 +resources/alibabacloud/database/database-backup-service.png,sha256=Lldb7UkuE2TxjvfkMxX39jWxT9dHNUGsnDhgR27jh0o,23920 +resources/alibabacloud/database/disribute-relational-database-service.png,sha256=LGWAqBLbnF2KFIUldcwPV139ltsYLPlFGmYxTeNqMJQ,12077 +resources/alibabacloud/database/graph-database-service.png,sha256=PnHlowjW37vm9nJGr9YCY-y_kEon5FtQA_BLU7DsWhc,30047 +resources/alibabacloud/database/hybriddb-for-mysql.png,sha256=TGGkrUAwtd1cC99NbfJ2ElqA071Ss03AoEL0CZ9x_6o,24441 +resources/alibabacloud/database/relational-database-service.png,sha256=LFxd6yInku2HSLj-Wx_mdEkVzqu8sVTsMhMTZ4CgW2c,15525 +resources/alibabacloud/iot/iot-internet-device-id.png,sha256=tq5AGrFB6Jxf7w227wBQddqGTqmvVPw0PyBfwjbWZno,14315 +resources/alibabacloud/iot/iot-link-wan.png,sha256=UUOvpwCQbU6nxUnFBV5F6zoJQK--UqvsWhWgjCObYvI,28596 +resources/alibabacloud/iot/iot-mobile-connection-package.png,sha256=moNrZUhMD0NF4nrJ2aS7Vzg8TUIP602tXSXbNVWgSLA,6824 +resources/alibabacloud/iot/iot-platform.png,sha256=ItmE3tTtWnoOFON0gIg6o6JzcDbsJ6tES2oIp27fGRw,12202 +resources/alibabacloud/network/cdn.png,sha256=X0470b_048AXsWPFx2Xuwz75OMH7eHUsJn7b8yXElCo,8366 +resources/alibabacloud/network/cloud-enterprise-network.png,sha256=Zg5rikpVZ_9pKkhQP1YS1LVJ5tNV2G2M34SXqj47QO4,15499 +resources/alibabacloud/network/elastic-ip-address.png,sha256=fIXoSJyfuQuhnvb-BInrvDCFVlWxr7etYynRieQkOvM,9595 +resources/alibabacloud/network/express-connect.png,sha256=5M5qDj5oQMW7yt3MJ4amygDYLrKQo099-nPetB8Odqg,8286 +resources/alibabacloud/network/nat-gateway.png,sha256=epcRnYUN0msaiQCHQkFZzuCDLu1ZTw0PIEfvJzHOCC0,5959 +resources/alibabacloud/network/server-load-balancer.png,sha256=ONH0i0GcdXzsKdtJKKRZhr6YbpKB0jIStzojdpkobKA,11425 
+resources/alibabacloud/network/smart-access-gateway.png,sha256=Lm5kpXB0VqeuGOqP-rguXjmPm4HhE9pbPnoJKuaHhrw,8302 +resources/alibabacloud/network/virtual-private-cloud.png,sha256=2hsINTMTC5pfDkUB4wM0y_0tA8YvQ-6PoigQalt7v3o,9764 +resources/alibabacloud/network/vpn-gateway.png,sha256=CPbHIzad1kp4RPGioeySHBFTrSuGV5VSPXdzmZkbJgM,15628 +resources/alibabacloud/security/anti-bot-service.png,sha256=boDsuTW2XWbSGSIsWHA8ZK0acPdLUyLE0vQ74pHQfxM,18056 +resources/alibabacloud/security/anti-ddos-basic.png,sha256=JqUt5oqXesXqSTpgFuoh3MaP1H152w3i_yL7jXnQVPo,12511 +resources/alibabacloud/security/anti-ddos-pro.png,sha256=tOSd4Jc9UVTL1nVtd6SncD0_UCuRUrWRzYCXAS7aQI4,11756 +resources/alibabacloud/security/antifraud-service.png,sha256=Ftsj8ZLHgKHb8oN42AcaT-3bN4G40y6AXQUGZDnSaSU,13913 +resources/alibabacloud/security/bastion-host.png,sha256=ZAApQcTtTaqVEpWLKUjKR0ysLlcm5ajTkeynMfYeiC4,10571 +resources/alibabacloud/security/cloud-firewall.png,sha256=KlE8cG8UUjUrtiZrtRLNPmXOXOjZ7eLoZGcxhTJRRJk,8365 +resources/alibabacloud/security/cloud-security-scanner.png,sha256=Ap5I_yiTlcX4OdVyx8PZzdTvexRhVNzyetS0b6AzTCA,16386 +resources/alibabacloud/security/content-moderation.png,sha256=fI9Lt8KG1e9RZbNZSEySrIyZciVpgNMjXXxlgA_zaeg,8447 +resources/alibabacloud/security/crowdsourced-security-testing.png,sha256=dSpx4CP3q5uD9dJUycqDe9eZyzIK8bxA1zrxidLUnIE,13496 +resources/alibabacloud/security/data-encryption-service.png,sha256=EyYQIPnJvFEZ_gHqOMPugkjv3owUVQxHSsQEfQOThp0,8485 +resources/alibabacloud/security/db-audit.png,sha256=ObPhrHDXtB8vEJ7SBktzwff1uHnlXB2vJ4gwHMCX2n8,11101 +resources/alibabacloud/security/game-shield.png,sha256=3146S6pTGM3xFwCSDTk7pqrddLnU88dOz-6jm_vt1KU,12598 +resources/alibabacloud/security/id-verification.png,sha256=BXHr4Yi5nc6IUzp0XTtRuTH-ylI9OyawSNSZCkIOE-w,11089 +resources/alibabacloud/security/managed-security-service.png,sha256=Zkazq-d-7hEewc_KNRx4k4NCSQHtnpr205c-SPXlyDc,10319 +resources/alibabacloud/security/security-center.png,sha256=6juNG1NFDg96dILu3W4h7loMAGiSAa-GT7iliYcdsfg,12283 +resources/alibabacloud/security/server-guard.png,sha256=CAWjtd8fBEQRYTNqRDHGtH2M0MtyGFhGDJRMFyhjhHk,10445 +resources/alibabacloud/security/ssl-certificates.png,sha256=UihTtmQHjcnal6LbBkefUolZ-6WLCseut4mZvoViIus,10867 +resources/alibabacloud/security/web-application-firewall.png,sha256=lWCikTG5MRrTDKzCyLUPaE-zBVWrYhv1uAEOBA5cd4U,7989 +resources/alibabacloud/storage/cloud-storage-gateway.png,sha256=C_xpjTCr-G1V_eI4bqk6zl2jjPmWqtT6_YrPQm58P6w,10155 +resources/alibabacloud/storage/file-storage-hdfs.png,sha256=mpQ08Sl5IMwWwPUVTDsGCX9YxslPUMZjn5t_-kRIMAk,7600 +resources/alibabacloud/storage/file-storage-nas.png,sha256=K85qpd_4W9u9NCTJ4dzpV7sh5Z_FFzmPN_WNaWQCg0s,8170 +resources/alibabacloud/storage/hybrid-backup-recovery.png,sha256=zDrn8npSpXd2U8peta3S6CGOGX7wX7LyEt2fpXQffKo,8196 +resources/alibabacloud/storage/hybrid-cloud-disaster-recovery.png,sha256=t9ISyzwhrBzmH3OQg3wxBKEoue6NA-KCK1U-3PCpL5Y,8722 +resources/alibabacloud/storage/imm.png,sha256=EcEkflX4xrrJZVK3TLNscVKp7rjToYatAYofnlf5DRo,10217 +resources/alibabacloud/storage/object-storage-service.png,sha256=ad0USD8K5vjYkGtmTD5_XascOdrlumPdLzS6c2uNOGI,7522 +resources/alibabacloud/storage/object-table-store.png,sha256=brSSKQqyBcJoRutWt6woWUDrdbXaoriohIVhw4ZQ09w,7788 +resources/alibabacloud/web/dns.png,sha256=MFjCkXVeyV_m59KfvQ3R3Ss9hFvvtG7by0OdeFNFLdw,12188 +resources/alibabacloud/web/domain.png,sha256=KAHdimbbEfCcOb4md8F1PvLC0lJCpt6GPIZ9e1xRl3M,15088 +resources/aws/analytics/amazon-opensearch-service.png,sha256=V6w8fCDT8-dIs7lSsEvSrqsZaIyAwZ-qyPpYrOzXpDw,57983 
+resources/aws/analytics/analytics.png,sha256=Yb7ev6kXPMie3AoyYm6fWMQvznP85T4C40EjJxRZhHg,4220 +resources/aws/analytics/athena.png,sha256=79PmL1BMf9ZjqGPRfBgxRWDyxgS-Cg5y1GfmliAxAd4,19220 +resources/aws/analytics/cloudsearch-search-documents.png,sha256=zhIAyC7fmbt9ms_OwgUAil_lVABITVtkp4gR4PP10kw,1988 +resources/aws/analytics/cloudsearch.png,sha256=MD1pm69LbvzdALgbrSDJblRNu7CJ65r7aOwJhwmtSiU,15693 +resources/aws/analytics/data-lake-resource.png,sha256=XzheE2lIu4uXLUUMhZsEht3KiZfp9rKj2qGuBj-wjCE,28206 +resources/aws/analytics/data-pipeline.png,sha256=Jj2wmjEZdNHiBAazu7xdCbPsPwYfMFKckraFlkmYUa8,11146 +resources/aws/analytics/elasticsearch-service.png,sha256=AODgBMYUFS-WNb22dLdL-Pu356MqsJ0tJX2E1IzOtO8,13780 +resources/aws/analytics/emr-cluster.png,sha256=l3f9vThG2M1yb1_e721imVZSflUJDf9t49w6e2U--G4,4641 +resources/aws/analytics/emr-engine-mapr-m3.png,sha256=WRDxBPaAzoCwlBjT3dhxfQaGIYcb1xxqBV-0TxBaqgc,13020 +resources/aws/analytics/emr-engine-mapr-m5.png,sha256=fEoDRO_ZjDq6DXlHzmDlNwejEAQraEOIl9q5TTsZjcI,12897 +resources/aws/analytics/emr-engine-mapr-m7.png,sha256=344MRiREwwLOaJ6CTdcTKUPl_cMcREDjJ5rdbEPP-js,12689 +resources/aws/analytics/emr-engine.png,sha256=Miaheega-PwkV1aMbM77X9nSqcg2HEklXbY1AzbxQCk,7185 +resources/aws/analytics/emr-hdfs-cluster.png,sha256=joI2GN8PtDk1qG1pXvj4VqFK3TLCZaeFka80bOFf-Ss,3866 +resources/aws/analytics/emr.png,sha256=pD7MaOOAuzDxnJDzslVv4Y8pK2z62YTAJcY5xHJ73pE,18945 +resources/aws/analytics/glue-crawlers.png,sha256=CoLyo-tkppYn_-aEwzh9TNWv_T_Hp-a9BGI8dheM1YI,4323 +resources/aws/analytics/glue-data-catalog.png,sha256=qQDxqyTkC_5KYgx1_EkfC9ljN95icGeyY3eHNPerlqs,2728 +resources/aws/analytics/glue.png,sha256=NAGtw83Z05VprxjKode1JzYIuXfiFSuBvm7Oepan6tA,12228 +resources/aws/analytics/kinesis-data-analytics.png,sha256=qAgLyLWgO108xtOEK9pIklOCHFYNbXA5exCKMFnrI-A,14391 +resources/aws/analytics/kinesis-data-firehose.png,sha256=uvQHUT9KC3lM6EE06zU1FM_n6eNUqEq0Uk0gYkkuDn0,13496 +resources/aws/analytics/kinesis-data-streams.png,sha256=gpJ0eWZzZwA8JpfQQceA35oZi30XRbgMp6xDP5-f3Yo,17111 +resources/aws/analytics/kinesis-video-streams.png,sha256=EEJIW7TTnXJNwZjJK5VPLU4rWpXhj2AHNhUj150-5fA,15688 +resources/aws/analytics/kinesis.png,sha256=1uDwwUZr2e6PRsVYRnCfNuT5mfd-J36jxK0xtjC6eS0,14548 +resources/aws/analytics/lake-formation.png,sha256=jrDTQVM8HNqSYTh-oSQjMbRSHeS5t2_LlfT5-bnvVUA,18176 +resources/aws/analytics/managed-streaming-for-kafka.png,sha256=VmIRgdzMblAKvmNBQsZyjJJz92u447Tdik-eMdmUhGU,18110 +resources/aws/analytics/quicksight.png,sha256=Q-hx6DOGQdvaFaEt-ERvQPgQieuaBp5TnJZ9XbOapIU,12343 +resources/aws/analytics/redshift-dense-compute-node.png,sha256=fmqMwV9Cp-W5TYiuf-bW98y-PTkc7gC8xp_J5bxVA0Q,4361 +resources/aws/analytics/redshift-dense-storage-node.png,sha256=BY2SzqvAhS_jGbWxfqu65mW_Ptx8oNF3pyyLSxdjJSA,4444 +resources/aws/analytics/redshift.png,sha256=U9EUUa0NNgkHpv-44UQUH1mFvUnurM8Jb4qfXSamClw,14450 +resources/aws/ar/ar-vr.png,sha256=AJJbwYKnMr1B7jgtXonbBwZ04-16iodW6Td3mqgrEGM,14361 +resources/aws/ar/sumerian.png,sha256=ek06VgDN25LJbmI5hRLRLMggMdxsMTgBe8eRfy943-U,19105 +resources/aws/aws.png,sha256=MvAN-Bh2fk7WzW-KkdiQNiszglUMFREv4JjAA1oMnX4,19260 +resources/aws/blockchain/blockchain-resource.png,sha256=hH6SUJzvhBgdvAPhUVU8Z8RgSLIEgjiNLZG9W1BDjOg,4378 +resources/aws/blockchain/blockchain.png,sha256=UAcVFqqgCJnKyzfdim9PcAZADC0o9BksYfnLM-5AVD4,11731 +resources/aws/blockchain/managed-blockchain.png,sha256=qrjEMr-XruQR-8DTLL925kMRWOdLZsNnJ4S_SHgp-vE,12426 +resources/aws/blockchain/quantum-ledger-database-qldb.png,sha256=CvzDbCvNhu1m26xufC5W9MdCkbrzcP23QWmNIW-tueo,14115 
+resources/aws/business/alexa-for-business.png,sha256=kkDh622849LNf1GHoAZwYI3-ZXXbe6mD0Cs919ZJZzw,16455 +resources/aws/business/business-applications.png,sha256=JPYV7uW6UUPxgSfYtKpShS6LcqGFbRr2KBl7ysMrV8Q,14648 +resources/aws/business/chime.png,sha256=lU9KETuqnOxptr2K1vnwhaG4R0LLklwX9HaZXSs-5E4,17100 +resources/aws/business/workmail.png,sha256=XEMOIN0Ajm-03pMLhUdSAwqVfUMOaA7rMR6l6P_uzfM,7852 +resources/aws/compute/app-runner.png,sha256=JDuVhFD9A2GikGHtRBuylfmIbxW5-4-I_ffeBAespJg,34559 +resources/aws/compute/application-auto-scaling-rounded.png,sha256=bCneCmCMSnhCo9zVAvvH8JLOu9cXzh0_Xq4yOZNwlM8,22238 +resources/aws/compute/application-auto-scaling.png,sha256=bCneCmCMSnhCo9zVAvvH8JLOu9cXzh0_Xq4yOZNwlM8,22238 +resources/aws/compute/batch-rounded.png,sha256=fWZPyynR_ZnD_FmZRhs04pen4coxG6xpC55ZG0g_1gw,17242 +resources/aws/compute/batch.png,sha256=fWZPyynR_ZnD_FmZRhs04pen4coxG6xpC55ZG0g_1gw,17242 +resources/aws/compute/compute-optimizer.png,sha256=KK-aPeAl_dPRwz5xNw2unNUi-TbyI8VQvz4QMf_pyrI,24332 +resources/aws/compute/compute-rounded.png,sha256=1Ti55Y4mYzpA55xW28ssHRhHdD7zhT5151MHKniFmEk,11243 +resources/aws/compute/compute.png,sha256=1Ti55Y4mYzpA55xW28ssHRhHdD7zhT5151MHKniFmEk,11243 +resources/aws/compute/ec2-ami.png,sha256=Mt8coO9zwesbI3RQpoK3x53wjp2HWJXpNl7oa4PTlpo,5908 +resources/aws/compute/ec2-auto-scaling.png,sha256=0u_DEPpcVs5gB-NIF8pklIPyTz9kPXiiFRYhk8_o_NI,10841 +resources/aws/compute/ec2-container-registry-image.png,sha256=h--JsNPsKNZITttr8wtAhcpkIk1kkXPH-qoYxjaJqII,10385 +resources/aws/compute/ec2-container-registry-registry.png,sha256=JN3DXljaNwr2YJ6tZIfQKbIfgZ10Z2uGGs2WpVq2PtI,7594 +resources/aws/compute/ec2-container-registry-rounded.png,sha256=6xMPi32kj9agybegU5Reu65LfdMIaP5Ml9UFaIZbJD0,14536 +resources/aws/compute/ec2-container-registry.png,sha256=6xMPi32kj9agybegU5Reu65LfdMIaP5Ml9UFaIZbJD0,14536 +resources/aws/compute/ec2-elastic-ip-address.png,sha256=fZACEUlP6FMyR-xox6yH9uBlG6RI4JyZcvD_YExd7eM,8267 +resources/aws/compute/ec2-image-builder.png,sha256=sTdfys39w83hYcvihvcrZeIHupDei7ZezpyFE8kxP8Q,20889 +resources/aws/compute/ec2-instance.png,sha256=6YdM6QcctN0Eb8Ix3S5x5WlQM5X2gMV3y0GiTdmLKj0,2691 +resources/aws/compute/ec2-instances.png,sha256=3pxZbqXCMU5XmewbntU0lzEy8-GDrQcFUzIiQAdO15w,3623 +resources/aws/compute/ec2-rescue.png,sha256=M6MSSospvHErss3iL8ZObXQh7CnQugerZWL6VStzK14,14768 +resources/aws/compute/ec2-rounded.png,sha256=t3yRitTtAtVP0o9UfYHq_Yi7HRuwwIrY4L8EiEWZj8I,8893 +resources/aws/compute/ec2-spot-instance.png,sha256=gcaOGkdAqfOn-43iCrv_rWJmL6WOPI5Q0U4F6ihVqv4,19308 +resources/aws/compute/ec2.png,sha256=t3yRitTtAtVP0o9UfYHq_Yi7HRuwwIrY4L8EiEWZj8I,8893 +resources/aws/compute/elastic-beanstalk-application.png,sha256=JbkRATKmpE32Ry4KCwu304VVC2pVCqF71WHCRSnVUDk,12850 +resources/aws/compute/elastic-beanstalk-deployment.png,sha256=aEAMn00YsLA8bJTzY7hrx3Mb8jWcuuBgoqqssWQvPD0,18984 +resources/aws/compute/elastic-beanstalk-rounded.png,sha256=3Ysr8ikSVaN-KG_FhAT-xRkNz2WyBuC3TZfwWvf-9lk,15475 +resources/aws/compute/elastic-beanstalk.png,sha256=3Ysr8ikSVaN-KG_FhAT-xRkNz2WyBuC3TZfwWvf-9lk,15475 +resources/aws/compute/elastic-container-service-container.png,sha256=vWGcKWH7vzVLCe_xT4YmhPohK3T2CVp5ymCyufxM5xc,2607 +resources/aws/compute/elastic-container-service-rounded.png,sha256=wesiDuH_Lxltbf8sDkzRO-yHPFRoOG0zgPwZfbv8vEs,16020 +resources/aws/compute/elastic-container-service-service.png,sha256=HoIyAVTCsqn-FINi0I4lBY0IA56YJU8G3OeMHeEf1Ek,4697 +resources/aws/compute/elastic-container-service.png,sha256=wesiDuH_Lxltbf8sDkzRO-yHPFRoOG0zgPwZfbv8vEs,16020 
+resources/aws/compute/elastic-kubernetes-service-rounded.png,sha256=LhPZvybgGEnLQer6_CpmHsz-MgSvYbAL4bgRlUP7vwc,15851 +resources/aws/compute/elastic-kubernetes-service.png,sha256=LhPZvybgGEnLQer6_CpmHsz-MgSvYbAL4bgRlUP7vwc,15851 +resources/aws/compute/fargate-rounded.png,sha256=jhanRW_p_Dqo-YsjtZfwHS11ES_9NfScbtXaqn68tnY,16009 +resources/aws/compute/fargate.png,sha256=jhanRW_p_Dqo-YsjtZfwHS11ES_9NfScbtXaqn68tnY,16009 +resources/aws/compute/lambda-function.png,sha256=5asweVX6rKOKsFpKVkuGk8E1uAfhYMvEHUHLYY2Mzvc,25042 +resources/aws/compute/lambda-rounded.png,sha256=4VkywbzlisL5SQ5166c4-CmcW7--T3Dz_L_qm1UUqAA,12623 +resources/aws/compute/lambda.png,sha256=4VkywbzlisL5SQ5166c4-CmcW7--T3Dz_L_qm1UUqAA,12623 +resources/aws/compute/lightsail-rounded.png,sha256=Vcs03pCTO-9Eg16g9uIQl2SBWfgGSgqtMhKvELUYu2U,15261 +resources/aws/compute/lightsail.png,sha256=Vcs03pCTO-9Eg16g9uIQl2SBWfgGSgqtMhKvELUYu2U,15261 +resources/aws/compute/local-zones.png,sha256=nFoVqTIs9Gg68gD2AO8rDD84mA_1B2ToFXIqxRMUiIc,25101 +resources/aws/compute/outposts-rounded.png,sha256=uCnygf7ccBCjSc1UyAYuBtGuLoWD5gKSvBorM3OIe8g,11373 +resources/aws/compute/outposts.png,sha256=uCnygf7ccBCjSc1UyAYuBtGuLoWD5gKSvBorM3OIe8g,11373 +resources/aws/compute/serverless-application-repository-rounded.png,sha256=UKhkkO-bdgjV-t9mBrXZTU2Me7rZldwxjAfKnw-kOC0,13298 +resources/aws/compute/serverless-application-repository.png,sha256=UKhkkO-bdgjV-t9mBrXZTU2Me7rZldwxjAfKnw-kOC0,13298 +resources/aws/compute/thinkbox-deadline-rounded.png,sha256=6qOmEq8aK3gGgJVcUG1pK23I-JP7ibmP7wH-N3axinU,30591 +resources/aws/compute/thinkbox-deadline.png,sha256=6qOmEq8aK3gGgJVcUG1pK23I-JP7ibmP7wH-N3axinU,30591 +resources/aws/compute/thinkbox-draft-rounded.png,sha256=voW-QS2yA-kj5P2PqSWmpwiQM1fEeoCRpHxIboxTXmM,32862 +resources/aws/compute/thinkbox-draft.png,sha256=voW-QS2yA-kj5P2PqSWmpwiQM1fEeoCRpHxIboxTXmM,32862 +resources/aws/compute/thinkbox-frost-rounded.png,sha256=5RbyPHwTXNt03B7Hcg_g2rZ2EelxWsUp9ThSPRPZMvU,35237 +resources/aws/compute/thinkbox-frost.png,sha256=5RbyPHwTXNt03B7Hcg_g2rZ2EelxWsUp9ThSPRPZMvU,35237 +resources/aws/compute/thinkbox-krakatoa-rounded.png,sha256=cPDFelXvhb96GsDaQvPWqYVoiL9JkCCj8pcGpOwUYh0,35077 +resources/aws/compute/thinkbox-krakatoa.png,sha256=cPDFelXvhb96GsDaQvPWqYVoiL9JkCCj8pcGpOwUYh0,35077 +resources/aws/compute/thinkbox-sequoia-rounded.png,sha256=eGkc0a3DH844NgsHlpSgswlLxyGrGgnATTmx2c5uF7M,37943 +resources/aws/compute/thinkbox-sequoia.png,sha256=eGkc0a3DH844NgsHlpSgswlLxyGrGgnATTmx2c5uF7M,37943 +resources/aws/compute/thinkbox-stoke-rounded.png,sha256=-3GI2k3zHMV_Wec9D5UHDh_zE7-GRGDYLrzUrZsWS24,37110 +resources/aws/compute/thinkbox-stoke.png,sha256=-3GI2k3zHMV_Wec9D5UHDh_zE7-GRGDYLrzUrZsWS24,37110 +resources/aws/compute/thinkbox-xmesh-rounded.png,sha256=wlLydlNDQFiappngiz7UaTv98EKQRQVhwdFFV_VO0Lk,37530 +resources/aws/compute/thinkbox-xmesh.png,sha256=wlLydlNDQFiappngiz7UaTv98EKQRQVhwdFFV_VO0Lk,37530 +resources/aws/compute/vmware-cloud-on-aws-rounded.png,sha256=agnEG28Rz3vgQNSvoSGXdzsm9lpjDKuuFuAY5pHxow4,14722 +resources/aws/compute/vmware-cloud-on-aws.png,sha256=agnEG28Rz3vgQNSvoSGXdzsm9lpjDKuuFuAY5pHxow4,14722 +resources/aws/compute/wavelength.png,sha256=OstmbCLfBR3BpB4L-r3vDOCHWwN3VsUHpg39q5u_UqE,28330 +resources/aws/cost/budgets.png,sha256=C_41xQjwBgOuQFrqbL0JII-7jvwUi4tPCfXEgQQpTyo,13721 +resources/aws/cost/cost-and-usage-report.png,sha256=ZefGWmobjwv7SN6TatpTPfvG29S3ItEc7jxrEQLpcwY,11678 +resources/aws/cost/cost-explorer.png,sha256=uoq1R2bZv-q_CPweYTA4RcFlC1E0UBMgtCc3jFStIDc,16144 
+resources/aws/cost/cost-management.png,sha256=WIF_I3xUPEWRP7pircLuiDLdMulDGUDCJqZ1qWWAnFI,16981 +resources/aws/cost/reserved-instance-reporting.png,sha256=SMeigtstNf5xvSirLrPkEJyorTQxlbdAIrw7I4W33tU,15011 +resources/aws/cost/savings-plans.png,sha256=yoYcueJ-e7LvtFPcpqVQqHUP0TVzpOlw-LnxPMouOBg,35912 +resources/aws/database/aurora-instance.png,sha256=OTdjROT7Tw0_eal3mYMSvCRsF9rIQyEXKqOzFYcTfFs,19877 +resources/aws/database/aurora.png,sha256=kgDI7Ty2oPRtBdkEVlEGyoKLMS5Xj9YVHFTiJOdqO6g,16948 +resources/aws/database/database-migration-service-database-migration-workflow.png,sha256=JR6-si_78Ycff004waFnwV7HZYzwXiPjTS9dqbYrvmY,18860 +resources/aws/database/database-migration-service.png,sha256=2av9NJES0vYd3BHIlADtcd8ZX3EHj3hiW7by1oWQs0Q,12447 +resources/aws/database/database.png,sha256=gGTI8JoIHYZj2bgZrOVyXenDhc34nJQE-WWgg2p7hKg,16045 +resources/aws/database/documentdb-mongodb-compatibility.png,sha256=EM7AOOaxHvbWKqxKhVlnf6hBB7svawwAXPFux-W1jo4,13880 +resources/aws/database/dynamodb-attribute.png,sha256=Fz6G5JdtmCyR5u0kYP7fF-QVzueNtjnVdny7nVvZiBE,6342 +resources/aws/database/dynamodb-attributes.png,sha256=QdTTZnXeZaGRK-RAS4z48Xnc8BleOlHLUzIKZc2SF_M,9461 +resources/aws/database/dynamodb-dax.png,sha256=O4I04TB9PKk9iRPDpMdwObcz3yulDiyFQdBdSZG7KaM,10522 +resources/aws/database/dynamodb-global-secondary-index.png,sha256=IOd1wyRs-y7UXlr_BMrKFjiIQBlSQ9RpJV0OdjjeP10,1166 +resources/aws/database/dynamodb-item.png,sha256=Ogkl4wJ9Qk_KH_cBK94lUFAt-BH4LsGH_DuryMyTw_g,6360 +resources/aws/database/dynamodb-items.png,sha256=SWO_CXsNx_yOTs6HzylvH1l1dsg1UuYLA-XxY4hF1VQ,9407 +resources/aws/database/dynamodb-streams.png,sha256=6ShNDV1QMcdDI56UCa4pcC59ExGG2GDs1UWrXQPxz38,9365 +resources/aws/database/dynamodb-table.png,sha256=BXV9tEkzpSGZDIhC6q47sUzYIq_b9zfBKYwEhWrSDoQ,1329 +resources/aws/database/dynamodb.png,sha256=HFd8qv4pWizxxpNomnOmts-6okIPTHSS3cGOcAyhYOg,16774 +resources/aws/database/elasticache-cache-node.png,sha256=SWJDWTwFReGy4RLNq5PvViRi6GxVN9Pii0T0wa2p4Ek,9307 +resources/aws/database/elasticache-for-memcached.png,sha256=gFA2pIG0Mu7xrrrsN0p4YLI5wceHlh-trOqnXBHp0_w,16495 +resources/aws/database/elasticache-for-redis.png,sha256=CCRZyGbLEUUwColBJoZkZHZ_S0XMlvONV3A_ZE6Rt9c,17018 +resources/aws/database/elasticache.png,sha256=JRz2VfhOZMIQ8vzc8EneqlvIchW1tgdUU2waDrz7Ny8,13653 +resources/aws/database/keyspaces-managed-apache-cassandra-service.png,sha256=FQnhLbeSVW9XnIKDGXACExahOsGrc5oSD07i8JuSI50,23533 +resources/aws/database/neptune.png,sha256=T6IxYQbuPc4rpfOnkIYJujTLsODGNNu7XniegVAyAo0,17004 +resources/aws/database/quantum-ledger-database-qldb.png,sha256=KVr-ZJpPk84JsOEpxQj3RuQDTT_Q4CKf0bGxXAK30-M,14365 +resources/aws/database/rds-instance.png,sha256=cgN6560aat0Gx-XWiw-V8ovW4o3IjC7OZyxXylyz7bk,21913 +resources/aws/database/rds-mariadb-instance.png,sha256=poR3lKJNQrbJpeY2zUB8UFo5ckaoqjNpqPCutAl2KK8,17805 +resources/aws/database/rds-mysql-instance.png,sha256=qymqHrUAAg9BSCMMc5MLb3y2FFhE5y6Us9WP4CpshS4,18593 +resources/aws/database/rds-on-vmware.png,sha256=Cn8XhFE98SiwloTwzte2_uXxnmUA_bYXV6Yo7Oty0F4,14294 +resources/aws/database/rds-oracle-instance.png,sha256=6cla7vxiYgRsf3q8_lXr0dKHwXhSwSu3TNTk6o2ShK0,18654 +resources/aws/database/rds-postgresql-instance.png,sha256=n8sPGVVXlMVE23ETqj0vUfJxk6ugfWKBq_fwHLSEgng,20899 +resources/aws/database/rds-sql-server-instance.png,sha256=UU_FaoJqWcB2qh69FZOr9kA4xggPoJhAI4JBsptUbnI,19557 +resources/aws/database/rds.png,sha256=klCM1cLMgqPL6-BlfQK2kpQneodrIAENS75zzKKzyTU,14316 
+resources/aws/database/redshift-dense-compute-node.png,sha256=BucnK3Ace4MaOqI96oWv6g4hjUDDZynBuoztPBS_55E,16581 +resources/aws/database/redshift-dense-storage-node.png,sha256=QR8zABm7szftWobfta6rRlZkZnQ82Srmf5qkNyl8qNM,16917 +resources/aws/database/redshift.png,sha256=IA2S91Oa0LglIlO34kfrdD6q0kMFugaoCedNvaWiRqU,13975 +resources/aws/database/timestream.png,sha256=YqerB9gjgyv9tms3z-OLslbrHbhExzDrsTEqyZb5kwM,14279 +resources/aws/devtools/cloud-development-kit.png,sha256=YxbbjhO8ZDaoyl8IwM0ScG5TWhqTRPdnIuPi8s0l2n4,35984 +resources/aws/devtools/cloud9-resource.png,sha256=-woz3VwPIMmKcFtWppMkxdC2_PZhP6gL8fBpJvgVi9w,12234 +resources/aws/devtools/cloud9.png,sha256=Cvpqiw9ErKnsPu2mMY-FE8_ZrgfmxqZNekdJHKkciNs,12660 +resources/aws/devtools/codeartifact.png,sha256=B37Q9ccp37czxFYA2GlpAuzS_dp4x0uXwq8Qd4YKXrc,38562 +resources/aws/devtools/codebuild.png,sha256=jyPxUllV3JdtUuScHQdgoEEez_-F9uQtDM6HS5ylSjM,14925 +resources/aws/devtools/codecommit.png,sha256=zpfvR62tBbiXczwz5woaKl46slSNBYBNwgvMCFSk8PI,14785 +resources/aws/devtools/codedeploy.png,sha256=TMxsnWiahfdiU0t-zmINYZwywGQYInL_GIvXMWZz3lA,16827 +resources/aws/devtools/codepipeline.png,sha256=g2HsDWq9CNhA26ECrXSQV_Vn1zoi8bEAZbYeclAVeLQ,11048 +resources/aws/devtools/codestar.png,sha256=U1smxEJP_kV-UmTNuTMbiZ01N3xg968thN33myhZUPg,18562 +resources/aws/devtools/command-line-interface.png,sha256=2BYHjP3vo9l3pECB0S1TBf3om79GR9Xp-OVrhsMC9V4,7827 +resources/aws/devtools/developer-tools.png,sha256=3k3jZVeibn4CA-RIZHbio09P4s3FsS5w2YN1u8AzsC0,19300 +resources/aws/devtools/tools-and-sdks.png,sha256=ue_A_HfiHW6Ri7od6DeQo_RynhL3wpnvjn8Fo49-GP0,16562 +resources/aws/devtools/x-ray.png,sha256=TKesE75Ju_6n-Sb54_bPKlnhm-V8awa_CSDabJwBjqw,15592 +resources/aws/enablement/customer-enablement.png,sha256=CdPVd6PSkS1T-C1faZok8Re1cO6W0Wje0iUttlWJyW4,18196 +resources/aws/enablement/iq.png,sha256=ziGJV985pagxibWQxj0KyCxXUgH5ycKgS3qqaVnT3-s,31129 +resources/aws/enablement/managed-services.png,sha256=-RMdO1E2WBbYsECUhKZevlHgm0I6QHvkM_WoUPxcdJE,40806 +resources/aws/enablement/professional-services.png,sha256=Z-6VSYw4qAKxWr02MW1kSrJUp2Dm6ixGcgaFmfEp5Aw,28329 +resources/aws/enablement/support.png,sha256=W6RLYm1QMCUSMhNxBFnBBArCp4mKQR5rjS4kq6PV_vg,32729 +resources/aws/enduser/appstream-2-0.png,sha256=2TdeZX6kg3Iu6ufLWNNiGPGZq9Sy9ZazsVmaFqODXyU,7702 +resources/aws/enduser/desktop-and-app-streaming.png,sha256=Qap0gaM_GbB7R4nTddWfBAvhbqRb3M6F-dJ-aEBd5Lg,17954 +resources/aws/enduser/workdocs.png,sha256=MjfapuQk5zZmrCH-Cxad3i5xCjY3in0cgUAIRloHHrc,7646 +resources/aws/enduser/worklink.png,sha256=NiNr1TFYYRpjG9OBVjbg-aAHcSUe72ZRjilFwh4Oxio,8304 +resources/aws/enduser/workspaces.png,sha256=w5Eysfb10SS_5nf2qypQPio-j5j6glRcXU5WCNCv3jU,17749 +resources/aws/engagement/connect.png,sha256=hH9htrTv0C8ox6ph30gpq5sK_9yKBZ283JtUu8bWZJ8,15786 +resources/aws/engagement/customer-engagement.png,sha256=EdAZOg8H9QSnME5Ou24OmNlLitDC1_Ra1_3GP1KuvkA,15130 +resources/aws/engagement/pinpoint.png,sha256=I4Q6PZqFsG_9MVifm1sp44ngK5S-aW_keEXPZmFlkiQ,18802 +resources/aws/engagement/simple-email-service-ses-email.png,sha256=7cW65NxVhohiTy0LBcCclHaY6CCXD-_OTTqd1EJlTts,18396 +resources/aws/engagement/simple-email-service-ses.png,sha256=Q0m67X070AR_GLg4K9V21YTnuhmd8VA-Xwqe_wy0nos,16227 +resources/aws/game/game-tech.png,sha256=rdyQ2wKtw0UqA9GZiLhWFDDA9e2i4lJpa7aTaLXOG9Y,21639 +resources/aws/game/gamelift.png,sha256=Niaqam8mvqIWsttCNZZRKoHWZ9gRVRP7R_RpE4Ae5NU,18697 +resources/aws/general/client.png,sha256=5fneUwUrXlL9VPzoe6Eg_NHwO3pJmmQ9n50jxQZA11w,5059 
+resources/aws/general/disk.png,sha256=dnJiVYZgnbWbXR1SGfvcgg_R059O9avG8WrkulaHzpo,5779 +resources/aws/general/forums.png,sha256=2FFf0as9xcFJROnBqLIyd25OCyjY-IeyY3UwoNUPWGM,3312 +resources/aws/general/general.png,sha256=b4ABg8ojlQJzZ07y5A6WJ6o9ggVeAbPaEg6ywsaecOg,17893 +resources/aws/general/generic-database.png,sha256=MCIi31WboX7sMtKmz7jqyRl29MUS1EbBk9xFS9ihYro,2841 +resources/aws/general/generic-firewall.png,sha256=qoFL8C3goCBbNPfcrx9lghd6TSGdjdORU7IOpiSgL0E,5115 +resources/aws/general/generic-office-building.png,sha256=7QaCXtP-OH5m1uBOJRy22_gw9gLY8I-BgiDUGz-ZRPQ,1375 +resources/aws/general/generic-saml-token.png,sha256=1SyF4J7H933swoNv2L2KrB7mlNTqKdKp0btcvGIEud4,3759 +resources/aws/general/generic-sdk.png,sha256=GitTz1NFxw8gwnslKD1CdVlyKHvHzTNT_5fPet4dwzI,3630 +resources/aws/general/internet-alt1.png,sha256=_yVmFGytTMcAuCPN_KrOGp2aV_WlbWkfot8Gwr1oE9Q,15306 +resources/aws/general/internet-alt2.png,sha256=ZnlZCXnaR4k7LRlN-lW5iBtIR1-Pu7QSeAsWqPXjn_8,14940 +resources/aws/general/internet-gateway.png,sha256=TfMRzw924WNK3oi-ijmuB3Abn4XJrZ0WHt65YMaRdn0,16701 +resources/aws/general/marketplace.png,sha256=561XPwpG-Hj0vff9aoSZ6L2xvYopWZBb95nLMfp7QJE,17391 +resources/aws/general/mobile-client.png,sha256=7UxFiclO1HeBCITE6ohgzANyN3hBMPF8pHOgG8QyWAQ,4475 +resources/aws/general/multimedia.png,sha256=J9RQpdjjz8JeXWZpStEDd5fnjBjtaKuo1hsYQ5pRCoM,14336 +resources/aws/general/office-building.png,sha256=3XMuZ7rBBIix-FGfVdefzVP4-LGeykAN0DB_t6ODSwY,5001 +resources/aws/general/saml-token.png,sha256=erClOx0odHNgmnTatWdYHSLLU3T7rhuwdugaOREMcpk,17104 +resources/aws/general/sdk.png,sha256=-PT-_xX1fFu-OGaiizlqhOeeNpU_lTAm9UrYODnsWgo,18908 +resources/aws/general/ssl-padlock.png,sha256=fKm9lcnkcr1NxkWeUV8U3_C16sWOTIzMUt7VnGRnX_g,21107 +resources/aws/general/tape-storage.png,sha256=7kg12N-NKUF5bbuOA9O1OP-exd6vort39bcn5bxX4pA,18166 +resources/aws/general/toolkit.png,sha256=82Uo7KUlbsdjLpxHQSduFujYP6EQE4mTpQoWnIUSTf4,23279 +resources/aws/general/traditional-server.png,sha256=WDBOh5zhY1kqBfdLaSc8TDQoBI6ohAEXSNiA6oFx_zw,1164 +resources/aws/general/user.png,sha256=Z1OJTTBAZsPZl5-uI-7WtPetOTWKTrTVYcogxiDZTjg,3863 +resources/aws/general/users.png,sha256=WSQQXAwKXYCbIdu7WIpbhkrDsloZfAj9o-vTBGZ6mnU,5299 +resources/aws/integration/application-integration.png,sha256=gvj_wHZifRO8Lf0UFdE6YRTCZMKF1vJQwTSjUEEwPJ0,14787 +resources/aws/integration/appsync.png,sha256=-hGauDq-p60rnGF2w4P7e7dLqh2K0QBlWx2XG6GeDPM,15781 +resources/aws/integration/console-mobile-application.png,sha256=6ZPGmgcvXohukBO70PwxuEYRTFGn5Nv1ZAsn1x_Lpoo,22670 +resources/aws/integration/event-resource.png,sha256=q1nM0TrOtIBHsGldeJZhEBzq1iJHGjPcNWKxrp0EaC8,7381 +resources/aws/integration/eventbridge-custom-event-bus-resource.png,sha256=gCMeCK0_rWZIV2k23IbpnhOCJoP97_Qc4qKV7BpeQhE,18424 +resources/aws/integration/eventbridge-default-event-bus-resource.png,sha256=kH4DxeGj_N-sKE4K2XUTjQ82aZ04ukrd0lVT485lBGw,9836 +resources/aws/integration/eventbridge-saas-partner-event-bus-resource.png,sha256=PrdC4YpbZG4sFLk8focEtwRNF3whNuLTLWmd3Mz58YU,17747 +resources/aws/integration/eventbridge.png,sha256=6WPtRvVpCMbMnD8pGlVrUGWlPkpJqrv9L-C7HKfoq0k,36385 +resources/aws/integration/express-workflows.png,sha256=P7NscxV_fY3HscuBaXi52SRTg7cdY9rEvhP3Gw7DZMA,18508 +resources/aws/integration/mq.png,sha256=yZ3jQjRuI2NT51WXQ3jIfN8m2-nGqtv6dL8WwCeCTaI,19263 +resources/aws/integration/simple-notification-service-sns-email-notification.png,sha256=ziLcoz5VXxKLqMgG80kO4-p-Ox9LMb9-03exyNgj8_E,13456 
+resources/aws/integration/simple-notification-service-sns-http-notification.png,sha256=uRjgy8TKaA_bdjGOVpey0aOET8thl4XRZ0zEw7oR9iY,11574 +resources/aws/integration/simple-notification-service-sns-topic.png,sha256=DfkkZ3XRG-GbMA1hTssKCNd2hKEiOjisfYZq5apPQ2k,7189 +resources/aws/integration/simple-notification-service-sns.png,sha256=nB0sAO75cvtKV8eKXfx5OLhCR2mEKP6gxVAsivXb0Vs,17114 +resources/aws/integration/simple-queue-service-sqs-message.png,sha256=-6cSrKpLN_EvJt8bimcyZ4L96o5Cyy5tUgGEmulT4og,7262 +resources/aws/integration/simple-queue-service-sqs-queue.png,sha256=OBFY43jHIfi98W2XlMnityuEUXZCfNiyA0g2kNGyX7U,4710 +resources/aws/integration/simple-queue-service-sqs.png,sha256=npyLOqcmNLJGPu1e4lqu0WKNSdMCd3wdLxrA-2bPxpo,22846 +resources/aws/integration/step-functions.png,sha256=DCwoq8Fflr27VxZGdQp32W5hvFpRqom-gDC7WBrbeNA,10115 +resources/aws/iot/freertos.png,sha256=NKzvjvZIzAuFlBy7JQxnw9XRZAvz8puwWK9M7hvcais,12367 +resources/aws/iot/internet-of-things.png,sha256=3DdIlwtpNqRfAb0BNRrZtOGlfxu0tpcuW8jwL7_9vXg,19116 +resources/aws/iot/iot-1-click.png,sha256=9JgyWdKrvd2_ZRzxHqloBIwiFQQFrlKMTp_v4HpuZf0,16764 +resources/aws/iot/iot-action.png,sha256=LHWZn0NxJ1WEOEvfDcqhP6ILUPnc542pm7L0audRh-Y,4516 +resources/aws/iot/iot-actuator.png,sha256=2zvDv_MoR-e-fxx6pzL7qqHsGxZ5aRbmP5btAzjtcWI,17560 +resources/aws/iot/iot-alexa-echo.png,sha256=7avrAC5EYO1rgJ6FSa0KnTT07fv-nqOdgXpNaAsVbOE,3244 +resources/aws/iot/iot-alexa-enabled-device.png,sha256=_I_aBCzXFsColAQg0hkef2PPMwp5aYdS74Q10V3gIbo,15791 +resources/aws/iot/iot-alexa-skill.png,sha256=9wdT0-i1-Ky7IxHERRddFtgzD9KqhLlT6Ra12Si59Mg,5313 +resources/aws/iot/iot-alexa-voice-service.png,sha256=i3lpl9D_1wb3yodII0Y6RqjbWxti2_4VtC2G_zxptK0,20407 +resources/aws/iot/iot-analytics-channel.png,sha256=7tuDKbEhdVIjjhkBGjLbSxadjQpcaGj2y9uEhVRJ3sU,22670 +resources/aws/iot/iot-analytics-data-set.png,sha256=R7u0hd-qalCd18azwG0YdizAmSCKBvX0WDWkROwVByc,2814 +resources/aws/iot/iot-analytics-data-store.png,sha256=k7fAiwiwoCqohDcLnLozwr9kjAZ1PUTivGsNz-fa7EM,13941 +resources/aws/iot/iot-analytics-notebook.png,sha256=6Kx9VX_rMMMe7brNnQhDdabWhRdNtZGH0luaq8f5i8c,3790 +resources/aws/iot/iot-analytics-pipeline.png,sha256=v83Z2kvjN1pB1Djs3CSiwGsYdpCe_olVuGEGVqwjNSI,2952 +resources/aws/iot/iot-analytics.png,sha256=Ta2YH546hsm1uZj5IkyBvSKuFAuT-HGdaxgt8EYT4RQ,18746 +resources/aws/iot/iot-bank.png,sha256=UGYY1cCypvGnu40h2aFrbfb2lzXD9jls51UZEUNBULY,11658 +resources/aws/iot/iot-bicycle.png,sha256=TenBVgdikiY5RJ7fRTn-hazZBIYYFcWC60e-jSmBRBg,13481 +resources/aws/iot/iot-button.png,sha256=5Xjfpt0TQC6OlAv3xuj3HSE_ufiMOZAekQWsSNH2v68,12481 +resources/aws/iot/iot-camera.png,sha256=fUVZpx65_UnyrKv7nogofaxIKOIwFuUx0LyARm2MJFw,4001 +resources/aws/iot/iot-car.png,sha256=LEZh8u2KQdWTyN1dog2QnFMahq4soSGlHdiugnW2BN4,11093 +resources/aws/iot/iot-cart.png,sha256=Zvvph0TW-NyGtmUXDqh0vZF0y_YMtrQeyJIYiXL4U0o,10435 +resources/aws/iot/iot-certificate.png,sha256=hhjqnuTgEMI7UL7GiemL2YcKWF13jAN0wWLzQeyq_9I,5479 +resources/aws/iot/iot-coffee-pot.png,sha256=22NDPxNX2gM6Pvl--MwrOZTIORoNAnm9UqYl4eUaM7w,12861 +resources/aws/iot/iot-core.png,sha256=uQSl98VQGbp7veUi_uKUmGBkadkGfa2U-wxfPXv_bVI,16331 +resources/aws/iot/iot-desired-state.png,sha256=LIAV3RST1KwJEd88odoTb-fqOadd98ajrnF5_pv_euk,11105 +resources/aws/iot/iot-device-defender.png,sha256=tWz-qhgrUDKhzztKlXQbN8anfQla4u0Gwnkf5dnBUg0,14149 +resources/aws/iot/iot-device-gateway.png,sha256=UBRQ08-n94YpDH5egwmzrhcD-_aCQkVf8HlD0M54TQI,11046 +resources/aws/iot/iot-device-management.png,sha256=sLtHUIHUbJEDuF4VwDuyaG5RZsPS7-Kn7gUAaIpZ5SY,19217 
+resources/aws/iot/iot-door-lock.png,sha256=YA5Z-GIzr4NDnt7LCRciUi0pl8aexKIOUdePbDHBvBM,9006 +resources/aws/iot/iot-events.png,sha256=y0gf0juRhMJP4P1HkUiL6w9jeWKmE8OHjwsWPgfV8FE,16778 +resources/aws/iot/iot-factory.png,sha256=QQJNTSqisIM25pzcfVICOoENWeJ39COqzDcMTwpXDZs,9716 +resources/aws/iot/iot-fire-tv-stick.png,sha256=8D9nUJ-Y9LFW_i40oPsILj61XdKaSdXcJu25Jadkv7w,6376 +resources/aws/iot/iot-fire-tv.png,sha256=f6CcTYbjuTuyptDOS7RfDaXO4rp9OYwxraUtQwT2Sms,6489 +resources/aws/iot/iot-generic.png,sha256=C0rRQTOTIjoE6ccLtRahI-e9lXoYL3oFYaQdWmxloRA,7973 +resources/aws/iot/iot-greengrass-connector.png,sha256=lwc4G5cD85pZlmQxQjRD6YumIg9ylZdyf9mkjMrbZ1s,2569 +resources/aws/iot/iot-greengrass.png,sha256=xUB56RZ4vL3bmAfeZO5pmD-egU2UKwQWW1sYouvb-us,13440 +resources/aws/iot/iot-hardware-board.png,sha256=EcYUSs76Q4hxNygWZBafD_ErWNXYyyJbssRoyvWf6xY,4136 +resources/aws/iot/iot-house.png,sha256=Iwd5XEcHwOqh2IyUExZYPbpRGOzZnpc29C-GhgzKr2w,9726 +resources/aws/iot/iot-http.png,sha256=7cQWhnBt4hyunNs2W-ghMltEuWi1KtF_QHkyHSpBmKI,2092 +resources/aws/iot/iot-http2.png,sha256=DZoAJcbnvFKEozKZjRygnghf24bGbs3HLrqLtAb1Gas,2662 +resources/aws/iot/iot-jobs.png,sha256=-30eYItIKuzfoHpbbHakXEWxbHmwbcmcL9rD0vRkFw4,4394 +resources/aws/iot/iot-lambda.png,sha256=vISgVTFocMRX6kpv9jCmODidZvLO_2sykkqCgu48RGw,5576 +resources/aws/iot/iot-lightbulb.png,sha256=xSZweUdtDcE4RuM-Zicn4IE_ad65rnUmPBFCgErKQ6Y,9944 +resources/aws/iot/iot-medical-emergency.png,sha256=LOFraMg5BZ_CE4yhVEzFMRggRyxvRZbxA35aVs5g-Ls,6794 +resources/aws/iot/iot-mqtt.png,sha256=ceznsAovxcK8jTKDWXv9MgCDpSQHt8EeUtJchgbHpTA,2734 +resources/aws/iot/iot-over-the-air-update.png,sha256=NXT9bwYmXUhlYLwkzVED7eh4oENnSkjKf-scRXb6uc0,16070 +resources/aws/iot/iot-policy-emergency.png,sha256=rsB_fZDu-GyoooF19vblgZZvIR6-MoFc4gbkq0SEHs8,3750 +resources/aws/iot/iot-policy.png,sha256=--JFK39K-D95xia6oLjEaq4sgZbjfsQE_TMcAbDjDgs,4809 +resources/aws/iot/iot-reported-state.png,sha256=ERcdnq-dMgZ-hsQaDXUq7mNmwLglykmqBOqOE6ZSvBE,8570 +resources/aws/iot/iot-rule.png,sha256=XJKbwjiDcL5PffE5lTlOp97-dVDUthZw4rhfi13v3tQ,3513 +resources/aws/iot/iot-sensor.png,sha256=3_LsPNIoUj82oQvhmKVASMb9vXRcqt4leo5zJYtmDbQ,15018 +resources/aws/iot/iot-servo.png,sha256=5_lIZJiXoJ33OxMvoEL_L6bC3GWwNF72EUWRHgNjSQY,11739 +resources/aws/iot/iot-shadow.png,sha256=S8yEJSVzhHtDMYoQ-TJ-JLxpzFGCSyMh7by1oXrbEcM,4963 +resources/aws/iot/iot-simulator.png,sha256=WbswVDJDDhD0Nk-F1S25cA_5o7bMIR-76wMpFyQ4a8o,15477 +resources/aws/iot/iot-sitewise.png,sha256=yTxncvh-w6ffAShefVTTeyp4HidvDuWbUgdro_kzdZU,14141 +resources/aws/iot/iot-thermostat.png,sha256=Xr0zpU3IecuukqiYe5WTHkNT6Rcr-LUR0hDehGvqVAg,16749 +resources/aws/iot/iot-things-graph.png,sha256=ofoiwL8jjuy8P2zxHjJsHguzXTWhMh9N8Tfkd8GdrPg,20698 +resources/aws/iot/iot-topic.png,sha256=q74DSfUAvPl0Lv-1q-Swo8sl9pxf3Gbw6591sWN-C6I,3116 +resources/aws/iot/iot-travel.png,sha256=EHn7SqdjLB6DHUieBH0jrhqSZ2H-jNAmLvr0k73adoo,19348 +resources/aws/iot/iot-utility.png,sha256=xavXlF3LYUNUPqUJ7H0O80jkwAnvD5X11zYovXoaD5U,9243 +resources/aws/iot/iot-windfarm.png,sha256=EekqAdeFbrzZaVKRRJtDCaKXct0UFGUMuPqlV_xWcAw,9253 +resources/aws/management/amazon-devops-guru.png,sha256=nZ_Vr7TrrBiCxsoRKBGDwV77nrpjCwwkmMTYyznnUTE,43175 +resources/aws/management/amazon-managed-grafana.png,sha256=uTx7lkef9_ktaB8zLHoEP44JIjc62-bysr8lhb6J7Ss,35309 +resources/aws/management/amazon-managed-prometheus.png,sha256=_BJ301Iwyjuyit4OvvA5zlohbY21kTt4YM0n_6WE_pk,17568 +resources/aws/management/amazon-managed-workflows-apache-airflow.png,sha256=3cQSgBqsd6wUNf3IOPVvAgqbsA4EG-xO6Z8DLV-u_hA,46911 
+resources/aws/management/auto-scaling.png,sha256=P853NRdYqgqTb14RHLYEhD82gPxADvVtRjU3iaCLm5M,4846 +resources/aws/management/chatbot.png,sha256=PVeS-PS8gXuVXrHMA2P9WgrX4-Ij6Mp0W_rJNKzRLJ0,23536 +resources/aws/management/cloudformation-change-set.png,sha256=60HZoZIVr32vq5x2M36pvm7Zyo8H2hKnm6iRTssGKzg,16423 +resources/aws/management/cloudformation-stack.png,sha256=nLwOHrx9FkvriDtR6QZOqw5msXMo9G-rt7akHO-cxbs,2572 +resources/aws/management/cloudformation-template.png,sha256=lfcnl9CQNpmaRNIDNuAzFUY3Qb-1FBhhpgT6uYt9yMc,3580 +resources/aws/management/cloudformation.png,sha256=WSoXYoJpIN75XFi0DArCO_Q9VRDpuuOQ6SzzfdslBqw,4033 +resources/aws/management/cloudtrail.png,sha256=SZb9EslbT5lkdB6DWOOP_NI5Ox-f4Gz1UjfUZoIlpFA,4384 +resources/aws/management/cloudwatch-alarm.png,sha256=QKMcrQLhvmxSbBlAwQd-hGivGaW_IOSHCCngOuORB-8,7606 +resources/aws/management/cloudwatch-event-event-based.png,sha256=9bc-4uKS44AK2BfL86_QBLLs4rvU62QWQ6o29Nfuk4g,3747 +resources/aws/management/cloudwatch-event-time-based.png,sha256=4xcO_l9tAu8TD-nFexP7s9kZnvCMf8gCrIYOX5sMihU,8124 +resources/aws/management/cloudwatch-logs.png,sha256=rFzhBi5sm2H22ysUqPrQq8bcrKPVcKPXmvq31cXdLN8,9181 +resources/aws/management/cloudwatch-rule.png,sha256=QKcbl6s4PNxl4F8eaNm0EIX5K1psYlAQLm0xh6b7u4o,10735 +resources/aws/management/cloudwatch.png,sha256=8r_nvKfhdFWmaDZPvGfWHioaLvFqpjn5YPx1QJ1Qc9g,7797 +resources/aws/management/codeguru.png,sha256=1ehJIBYjoubYC4wo9CfW4rDYf8ikXOOuePyqbCu5c7E,7308 +resources/aws/management/command-line-interface.png,sha256=kXpgovQfMdGphvJcVFsmY5FXoMTe5ZHizXgCSvCq5jA,2044 +resources/aws/management/config.png,sha256=IypjJCU38Acczs1N9Iz15xCsVqvED1ZN3SyQDvY6DAU,6252 +resources/aws/management/control-tower.png,sha256=qSbh2bdjd_djZ1jNifOoAn21vvDYlgNzJi9Agwf4xpM,5819 +resources/aws/management/license-manager.png,sha256=YxFEGJzIdHJbxdrvlSeJjUkUMcQU2Nw1SAy2DpKaIYk,6279 +resources/aws/management/managed-services.png,sha256=m7e8O-p7FnpsbERvZ6kQ4tbrPYgzJxjpl6UWHajamqw,11711 +resources/aws/management/management-and-governance.png,sha256=OJa2FZG2ZWGGITY5o3jVQ2O3pTZoKrBxRzSemm-nL9U,14909 +resources/aws/management/management-console.png,sha256=KvKpzYoYdBr_S0mh47WQYKzm-iVdgRLMEeFfD_BfI8Y,5098 +resources/aws/management/opsworks-apps.png,sha256=8AUzOZlF_CnjLxlYaXGqEcQo5mmEXo2BAcXj8xXz-68,1567 +resources/aws/management/opsworks-deployments.png,sha256=ndxZPLYEzhlYj-Bxm0ls7VmghQ_7VqtT4OCmUDH61FI,8766 +resources/aws/management/opsworks-instances.png,sha256=_Bzl3rmGuh7WYyN_3Os42_WhkCxMj__HGkVYQa_3_8E,4571 +resources/aws/management/opsworks-layers.png,sha256=8zPsOxxB_NgWJn4qVQxLFWX_cxmHgTYlUuWHD_4-9LE,1739 +resources/aws/management/opsworks-monitoring.png,sha256=j5nabJE8jxi--HeIe0Soh-00g86ZCK_V2Dx09BQb4IU,9489 +resources/aws/management/opsworks-permissions.png,sha256=yOsFu67lKAWd4WW_SHL0MVytpbCfZ-uCummfyLQaLfY,6757 +resources/aws/management/opsworks-resources.png,sha256=_Y0WL1M-xPExKdyIIny70nODYWI6MjuGs4Ka32W3_hI,16358 +resources/aws/management/opsworks-stack.png,sha256=duJkTiDEuZRkYeWgSZ5fJlv7h0y87UW4oDJtBMO4Ykk,1224 +resources/aws/management/opsworks.png,sha256=9pQi7gG_Dc800hOwPQ6PKVnxAfFoMaUyr3znsrNtFIM,5448 +resources/aws/management/organizations-account.png,sha256=D_j1gbGSzoxaLQ-__b447IO8Fv14rsMQKKWExG8SOvU,16471 +resources/aws/management/organizations-organizational-unit.png,sha256=ca1l3LRHeBQlx631f6-DdMHjLhn4yOmlV4S57e0vIg4,11950 +resources/aws/management/organizations.png,sha256=-8zaamqstLG-NoymLU9MvBGw1qirG421m4Af7TAXWP0,3766 +resources/aws/management/personal-health-dashboard.png,sha256=XhSVDdy6OVYl_7IDD1WoqZDsbQcHkNYT0oZFPsuiXFY,24533 
+resources/aws/management/proton.png,sha256=h3tGRoQSnbL6XdVwmpuw52gn9mSUtPqP6BAltY8E15I,32658 +resources/aws/management/service-catalog.png,sha256=cvxyYZtv8w3P67pbqMzMN6-aRYtbb4wxWYpUxMD_q60,9150 +resources/aws/management/systems-manager-app-config.png,sha256=NNbYNscnNXCKxsEu9WkAS2s08YV1bx6wcLd7e8Oobi8,42528 +resources/aws/management/systems-manager-automation.png,sha256=IkDlU76IFFpDqgJmZ8cl05yaIaBvtm7lAJo6_0ivvsk,24339 +resources/aws/management/systems-manager-documents.png,sha256=ibqQzQskLqlPsfe5CPyGRiqyWcJ_BLcR7cRsYVUB07A,3985 +resources/aws/management/systems-manager-inventory.png,sha256=orVXIAGbNrnXyeIQQyFEZCH8Jk6FptNmZg6efjbp9Qk,8529 +resources/aws/management/systems-manager-maintenance-windows.png,sha256=YsQ9QITxXpse7kOyWmysnRC8V5HEorx-wuLlb6tDPfk,22983 +resources/aws/management/systems-manager-opscenter.png,sha256=D5GzWTaZewj8K05oN7hXAF1YB5X2QuC2D8RinCxgE0I,20084 +resources/aws/management/systems-manager-parameter-store.png,sha256=5L-y53ORapdJ-MgfcsTs-vw4kRGE0Cxojr98RIIADA0,2763 +resources/aws/management/systems-manager-patch-manager.png,sha256=xwQaH_ov1tkz2Rxndo2x608IRrwN39wj48HOJSeoeNo,9308 +resources/aws/management/systems-manager-run-command.png,sha256=H_8o_P6AGTdDQguqwa5B3PNttsIb6_02AUHGk6On3aU,9807 +resources/aws/management/systems-manager-state-manager.png,sha256=vDnKDhMhYMzQwQgM_Ifp4oJ6W4-VvLmeYk4C9oJQzzg,13158 +resources/aws/management/systems-manager.png,sha256=CG5C9V8y0d1mbx0Ifp6fhslu1l0tZK4Ic7BlHpHZLf8,7636 +resources/aws/management/trusted-advisor-checklist-cost.png,sha256=wqAdAoL-EelVnfRtnt-BpcWj-8W2sXKEqTLLnKdUhZs,12675 +resources/aws/management/trusted-advisor-checklist-fault-tolerant.png,sha256=R0gtGDbG7xtZIJMgb6ho922hKZcYUYpFPI05OsqNxFc,14007 +resources/aws/management/trusted-advisor-checklist-performance.png,sha256=3T7wtkuWIkB0R7cHnTP3rSx7fnsvTjFEVvpQQp4xomY,11633 +resources/aws/management/trusted-advisor-checklist-security.png,sha256=yS1WnCoj3e-GwCgFcviApNXcLx9XpV3pHQnsXnvwa8g,9773 +resources/aws/management/trusted-advisor-checklist.png,sha256=Jh_Rk-XKfMIZtoBFZ87kTjiK89MER39zmBVlTBqiW2o,8392 +resources/aws/management/trusted-advisor.png,sha256=kAKerAllcJECq-pQ6QdxNNueJzrNkQlS4-sKk3eR9so,7549 +resources/aws/management/well-architected-tool.png,sha256=Y78XvVV4ZS4foPfZ3AT4fTkQ8H-2mEV-nkC3gjWsVL0,8755 +resources/aws/media/elastic-transcoder.png,sha256=xWG_oDurWh4HtUxhVsdMNxWTshFdBB-5DLgxJin1MdM,21906 +resources/aws/media/elemental-conductor.png,sha256=LIdF-GGIz5DI1Rj7PEgG7TJT931qBf7_U1UfrVYmoGI,9730 +resources/aws/media/elemental-delta.png,sha256=LIdF-GGIz5DI1Rj7PEgG7TJT931qBf7_U1UfrVYmoGI,9730 +resources/aws/media/elemental-live.png,sha256=LIdF-GGIz5DI1Rj7PEgG7TJT931qBf7_U1UfrVYmoGI,9730 +resources/aws/media/elemental-mediaconnect.png,sha256=V2cd6Gu7ZZYcllfDN0a8GOXHRzfP4qIUPIYqhEvI1qU,23338 +resources/aws/media/elemental-mediaconvert.png,sha256=1gS2iVqy0zddKBMxTNCeF9zaFR7iW4m6dROsPCrlUR8,18916 +resources/aws/media/elemental-medialive.png,sha256=aAYcex2I8dqAKgBltpabuP-j_TtbfhiCoRVyjt_Fs_Y,19204 +resources/aws/media/elemental-mediapackage.png,sha256=cHFkgq7FUuAOmFV3UYy5CI74mHEhDfwJp3ibaqSSZ9I,21613 +resources/aws/media/elemental-mediastore.png,sha256=tfno5ldoNxiWC5JiOtZH5mCcVu24pGgR8eEc1JTMiWk,20099 +resources/aws/media/elemental-mediatailor.png,sha256=oEv3mtbbw2mMfG3tVxrsd7EsTmG4X82FoKUBvvdHLRE,20405 +resources/aws/media/elemental-server.png,sha256=LIdF-GGIz5DI1Rj7PEgG7TJT931qBf7_U1UfrVYmoGI,9730 +resources/aws/media/kinesis-video-streams.png,sha256=4H3zZRaIXPnVCZ9xBQhMnMSdZy4hQ3TTpuoEzZ2etIA,22611 
+resources/aws/media/media-services.png,sha256=Gh0lT-pIxLPsdj3JlxC3MsohHDbocNzhe3wfb1hHXBw,13689 +resources/aws/migration/application-discovery-service.png,sha256=zm0k45MUmLrGPdyt9KRtWOm4qMGTa-HcxnV71NVdfPc,17530 +resources/aws/migration/cloudendure-migration.png,sha256=bC5XgEks4wY1aldN6Ed5p6eJBxR3blSWqKEvJnP6x50,28565 +resources/aws/migration/database-migration-service.png,sha256=KJ2HoClqjjjNS7C5W7vusItRErqiZCYcdqW4WGHjStQ,12777 +resources/aws/migration/datasync-agent.png,sha256=T7tC7VDVGNiXxZ3ANj9H_zPg4reB4CT-yWCkARCUpFY,10736 +resources/aws/migration/datasync.png,sha256=Yi5mwnrIpCHU0tmlkTPvh290MGujTotv5jUmx5pSNDc,13405 +resources/aws/migration/migration-and-transfer.png,sha256=byDy5XTyaDKjJwfzTuW4ZEnUBdjJaiCMjJyIAD2Vm8c,16392 +resources/aws/migration/migration-hub.png,sha256=7TpXsIY8Llt9spkRtRgbwB5y834dbeIXxZV0Ci-E6rI,21004 +resources/aws/migration/server-migration-service.png,sha256=BXvkgSDfyoVnGOQWo0No4Cn7KlsiIF0BgPdGwoK2ees,10630 +resources/aws/migration/snowball-edge.png,sha256=_VyGm7u--CGr4lhDNAcaBcyOy56bdnsmJLh5p7QjC7M,15252 +resources/aws/migration/snowball.png,sha256=7cyLbbUXdlBfSLQEK4aUnz_0QKqN0BPmO4eAITMFB0c,10289 +resources/aws/migration/snowmobile.png,sha256=jBTURG4f7d0X3YQWVm7QV6SKceEi0G-jdN0AAlYu2lE,15811 +resources/aws/migration/transfer-for-sftp.png,sha256=SH2ZFiDUwtsOfdq90yvkzzOK4cSCQ4Y0iUH3hjHujFQ,12066 +resources/aws/ml/apache-mxnet-on-aws.png,sha256=wDKuubONs8RsB643ZpRqzTmApTxUY0HCOVrpkWLOMHY,17849 +resources/aws/ml/augmented-ai.png,sha256=n2Ar9UBH-vl_ON3_ze_4HwIksdfqM1ufmNKUxOkd11c,30751 +resources/aws/ml/bedrock.png,sha256=lwTwGW7rE734e_TdYjet6F8iy2Iq6Tp0OczmvAjAndk,9392 +resources/aws/ml/comprehend.png,sha256=Sx0mm8r9noGbHfNHTivEzHIpHcpvSEy46Ge2dh-zwWs,10944 +resources/aws/ml/deep-learning-amis.png,sha256=yaWCnnNrFwGBc5TT-lQBd0QQpzsmrkiVG7dOnWh71nY,20776 +resources/aws/ml/deep-learning-containers.png,sha256=ZAWkq2pTirLZ2tf_JDK5JjtUqECruYy6yNXyM4MIttU,15417 +resources/aws/ml/deepcomposer.png,sha256=uVLMklTwsaSrU0I_MdWst3ans6Zm4xo-uzugtKpg3JM,19866 +resources/aws/ml/deeplens.png,sha256=cTTRJrgDJTabO-U2Yvl8G7ARFP43XURcs8fI8VLBH2g,11948 +resources/aws/ml/deepracer.png,sha256=WDSADoTmKj2F0d5aTbaXXcrh_Y-93eiOuaCoFzTQjMc,17787 +resources/aws/ml/elastic-inference.png,sha256=B1BmRH24FDCGOuYcRJXJNoIYzFVRbix5hdjeJqF-O5I,20506 +resources/aws/ml/forecast.png,sha256=_HIfSt1_sHCzfki44ye41u5cYVYwpePdcCApeg0lCQE,10188 +resources/aws/ml/fraud-detector.png,sha256=kDUSERjfnSzjqZQWjDkI97u-kQ4_CzOg-vL20dSwDVg,34278 +resources/aws/ml/kendra.png,sha256=UV5nAOG7K56QILDemgIjOt8Ngkzo7IVEmOVGpAdXT18,22455 +resources/aws/ml/lex.png,sha256=uzZkGcYIK0x_zUDXEwdfTu_jBfHo13XlCSjl8GTwupE,12360 +resources/aws/ml/machine-learning.png,sha256=v4ai86kMVVqLdYXDPtHiuxkK2yozfkfmW-OpnL9J-IM,22802 +resources/aws/ml/personalize.png,sha256=qbdRq6hEGfzu68vDTwlS4b8WAbzaw_p7bRe5JHti32E,18805 +resources/aws/ml/polly.png,sha256=H7Mu1dIJYyWbkqRGwdKz1DW7CTrbQgiSfIGQEenx9OU,12434 +resources/aws/ml/rekognition-image.png,sha256=cuyqYVALFcsyQyfEDbwSVWU1hAPaSU61x158cAanho4,28407 +resources/aws/ml/rekognition-video.png,sha256=bHgOCaQdWdDXT3lXBUzREMXZXzrfe8yoPygKP9Uv7gI,27280 +resources/aws/ml/rekognition.png,sha256=UiJMe74wKQL9Ow9kVGx0gkMjDI01nEqZhVVbASlzIyI,18138 +resources/aws/ml/sagemaker-ground-truth.png,sha256=PROsvYYfIROCqPPAm-n3RaRrfCxxH79A7RrZkvNJLwk,18336 +resources/aws/ml/sagemaker-model.png,sha256=OjfcPha1dahx7yuGAKn7mWo3HNyFwHUCabYvh8f1t_o,5676 +resources/aws/ml/sagemaker-notebook.png,sha256=ZYG0eXPl3jPBGVTo3Wl0evX_gylrdzNv5rBf9R_s5mw,1525 
+resources/aws/ml/sagemaker-training-job.png,sha256=qarfxlHOkePLVwoFqO0re2GKxnIFPmArVtZGPTnmxKE,3419 +resources/aws/ml/sagemaker.png,sha256=jhFaoKhN4icNH9-AGIPAD6TOcFRtNmktnGaFlTjNuqI,19713 +resources/aws/ml/tensorflow-on-aws.png,sha256=wDKuubONs8RsB643ZpRqzTmApTxUY0HCOVrpkWLOMHY,17849 +resources/aws/ml/textract.png,sha256=qVkdjzvGp05-_H66HMRULpUW4ur7jzu6fv4VHLX5wSA,15037 +resources/aws/ml/transcribe.png,sha256=XCU_r41prQ5-ol53_958Vo7NoeZ0JvvvkxGwDdZ8uTE,12524 +resources/aws/ml/translate.png,sha256=co1LMmPLsYVH3RYQGJkjn3an6basmFvPytIOiA9Wklw,12723 +resources/aws/mobile/amplify.png,sha256=saeBYRwOqPLxqAdRLj0l2ZscVNkQ-zJBHX7tR43MEtc,15661 +resources/aws/mobile/api-gateway-endpoint.png,sha256=GnXoGUxcSfB4eYD0C6CUojXwwLYYjOPnXif_O1b7ajE,3007 +resources/aws/mobile/api-gateway.png,sha256=LRee7nrPSq6UsDRyzO7FvEiE3jB1bW1SZ962rwBOZ4w,11707 +resources/aws/mobile/appsync.png,sha256=PhDi86DH-ryZlOs6s2FFXrjW_8L2ReaL8f5bdXcft5c,15783 +resources/aws/mobile/device-farm.png,sha256=QaBwu6GoqhMSTRbB92oXSVHn3JStmQEZm0wwZAIDrUE,19699 +resources/aws/mobile/mobile.png,sha256=kfz0NfJFmwJe9EfD_I1r_WB3Jq5TAUO_vpDDYxDKI3Q,14233 +resources/aws/mobile/pinpoint.png,sha256=y3HPnAyVUiM-Eh7kELaTJAPhFMB6oT8nQIpAMFf3uB0,18657 +resources/aws/network/api-gateway-endpoint.png,sha256=C9pgsaKc_OgxpGCoLPug9XWfG9EtjgdogkP5t7s9jUM,14286 +resources/aws/network/api-gateway.png,sha256=_zewQ1lr91DJ7iiJNzOjY-IY2l28_GOmmJEsURI2iRY,11886 +resources/aws/network/app-mesh.png,sha256=QJSHvw55E5L955-fGnQOzn3vjsVCKUj-Jl8QTG3c4Gc,14255 +resources/aws/network/client-vpn.png,sha256=iFLVSrbalgAfcTpqRF5RWdhMGuVQcWe01WSZtsuumJw,14111 +resources/aws/network/cloud-map.png,sha256=fVyyswSnwCFCJ2f2yixlbs_ptxpUh2mG_1uf-E2rUHA,15233 +resources/aws/network/cloudfront-download-distribution.png,sha256=3Nhgcxg93B_kbir2k-N1sS_8Cls9hz-yrB-iWMxH318,18082 +resources/aws/network/cloudfront-edge-location.png,sha256=NHOwon6tEe6jRfryxkyq_71BeqMB2RpJ7W-HQYwpvv4,4703 +resources/aws/network/cloudfront-streaming-distribution.png,sha256=fNxNCI9z0TwKfmsXC3rdWAn-0s4r6nbmOfRnVBuW5cM,16939 +resources/aws/network/cloudfront.png,sha256=G89ZZRXs5L4K3FLqNw3dYR59iPEfg-hb3hks5JWkapo,19109 +resources/aws/network/direct-connect.png,sha256=F5ibLhxlNTwkNtj0LrfoZKKZhu5y2xhD99Wjfb4iipk,15067 +resources/aws/network/elastic-load-balancing.png,sha256=L2_464odONjh2yqhYBXUdrjYyyuwxpaCPrxfXFwB7JY,15697 +resources/aws/network/elb-application-load-balancer.png,sha256=89gIfNUQ8-zKu8O5VP_kzfvlqrxTnBd7sqMOxSKECqE,17493 +resources/aws/network/elb-classic-load-balancer.png,sha256=hDuJ2ZnLV67WoCKa9Fzri2clM6Tzr9jQBLnDnSnPvKI,26243 +resources/aws/network/elb-network-load-balancer.png,sha256=aq7mP-2GPuvoSH0CaoL-WWeNP6uuz7JnP4hcFLSjsTM,22711 +resources/aws/network/endpoint.png,sha256=xRBaWHwhrbKD-i_fgCVx0cjgLuil12uxFEcy3_plkxA,5852 +resources/aws/network/global-accelerator.png,sha256=cIx4bw506h5aID7rTbes2IXK90V0p5AJfxeJqbsKem8,20579 +resources/aws/network/internet-gateway.png,sha256=sv-Y5ZeRVaOlJ9u1vwu2gv7IoBU8VfD8JwVZAAMkS6g,4749 +resources/aws/network/nacl.png,sha256=i1JSsVcdEqw4N0tW_9_YmPsP3q9A44zqgo2SNwWBEv0,5027 +resources/aws/network/nat-gateway.png,sha256=uyzoCC4AfpMbkju4lzqkFpEopXp_hI9GqFyMFxPyaGY,5168 +resources/aws/network/network-firewall.png,sha256=0q4QsAHo6qjKdmBhGuWbV1TDujfMirBdBqBI0CB333Y,31090 +resources/aws/network/networking-and-content-delivery.png,sha256=1OOagG48d4zbB5tySoqrm-XVCLQq0kJ3n3qfOyBb9Go,18179 +resources/aws/network/private-subnet.png,sha256=vUuDcpVnZrcII_G5uT85hMhEnjj5ObzQI7tHN2XcK7c,1750 
+resources/aws/network/privatelink.png,sha256=nY0T6BZTlCJurOzmsA6W4l9NS72ZsY-m3ElB0SmrXQc,14336 +resources/aws/network/public-subnet.png,sha256=aeXi26Cpu3zi959UkbGvwtV8A2ArvWCSVarAMOkr_5Y,1796 +resources/aws/network/route-53-hosted-zone.png,sha256=SFtMArsY5chNAx02gkh6AFzcwbr4ikeM1Bu6JZQdxAM,17435 +resources/aws/network/route-53.png,sha256=0GnFeMt4k6ArNwI7L-qVPTFvMf1u7tymH4nPVVaUfI0,21253 +resources/aws/network/route-table.png,sha256=VNArSlHS5tbFAaARIbp0Zn_1goBV_i1DTCj01hXNG88,5865 +resources/aws/network/site-to-site-vpn.png,sha256=64ZcxkTWDXGmGJrbtPCfRCDFEKneLsgvE89T8pM4c7A,10857 +resources/aws/network/transit-gateway-attachment.png,sha256=k1ukXf_Tdr-aEiDKZqgHswxJjpjCcul-UC2RTpK2Fcc,33132 +resources/aws/network/transit-gateway.png,sha256=YzyDRpEyIxho0mT78DtUOBbD03Y8y71K2GH7l7f0xOQ,17774 +resources/aws/network/vpc-customer-gateway.png,sha256=U469vHBYKh7j2SlMM4GPJ_vJo2vofdvSTJgcsOXAExE,19903 +resources/aws/network/vpc-elastic-network-adapter.png,sha256=Kq1WOgN95FCWYnaFh0HKtv_AcnuERJ1rFqaIxRsgaFM,12052 +resources/aws/network/vpc-elastic-network-interface.png,sha256=BA9phWaSvaWfjc2FTmt_rqKYpkvU2X1PoeMZHdy5_ig,17002 +resources/aws/network/vpc-flow-logs.png,sha256=tbtjClZb371bo_wO1PduBahvG4HXpPSYz5-tMpe02GA,17721 +resources/aws/network/vpc-peering.png,sha256=FfCLc6ciXEQSePyPTvoWfj-YpsUi0E1FCnM63TcxAQM,5655 +resources/aws/network/vpc-router.png,sha256=AL5-1HWLYijpX-ngMATzjJEMQg7GxRA1zePoD8bZexk,4862 +resources/aws/network/vpc-traffic-mirroring.png,sha256=nEUsUnN379N9v5TeRdEE41O0MNambr4ey6MjKuy3bV4,21573 +resources/aws/network/vpc.png,sha256=1dLisiAkpGVVvaJ_ZCoieUqjcN2Omhe4KeSEve4kAdQ,15305 +resources/aws/network/vpn-connection.png,sha256=d2rb-Fw8BRykPUZoO1SrgSqGISAxdaVm_Fr_avRRFR0,10724 +resources/aws/network/vpn-gateway.png,sha256=sUrN2BfXb4V0CM4ZBvRfQHggA-m-0eG27DA5bapQdRI,18613 +resources/aws/quantum/braket.png,sha256=vX88zypsNZg0hahN0YvIW7Svxvb45XWatx2RHLNvNBs,17630 +resources/aws/quantum/quantum-technologies.png,sha256=__1L2LpA9ASIoOi8e2Ug7UQk1OxwNofkDZQMhuB80ok,20005 +resources/aws/robotics/robomaker-cloud-extension-ros.png,sha256=_uSYUHdB010tsk8qPUSrBm6I_n5Gv28T9y8kUymKI-s,29957 +resources/aws/robotics/robomaker-development-environment.png,sha256=VVGuVPWE0EfLLbtczJ5VjOoRrTYWQ9gwO8RJnbrkP50,25356 +resources/aws/robotics/robomaker-fleet-management.png,sha256=KhuqDtufSmQ1dBVY6OQqqq7fUDu-Q4lJdyNHnsR-WUI,33146 +resources/aws/robotics/robomaker-simulator.png,sha256=luvFWZewY7Y3CmEwsIPi7weRlJ2yWnuTbOLmlI89GRM,4185 +resources/aws/robotics/robomaker.png,sha256=qJNEhRpuZtAGSl0SA6NW9oZT1B7NwRpmtSFGPb13kms,7375 +resources/aws/robotics/robotics.png,sha256=tQWIFI1TRorAU9gRu2A4GnR8k2D3tr0dyJ945bFXRNU,7747 +resources/aws/satellite/ground-station.png,sha256=eaCrbWxzw4rpc_-lBs6YfjguiJdV16B6Cc6hhr3T25c,13722 +resources/aws/satellite/satellite.png,sha256=MHi45aUo4YOFcg-3iJjfs3J6nxqDi5M6qrnn5ic_z1o,21141 +resources/aws/security/ad-connector.png,sha256=seGiL02GJm_wfVJMZgzKLBytMgTsoqIlYr38FdUr8Lg,15617 +resources/aws/security/artifact.png,sha256=rIzi9p3jN_Bbp5JFdsBP0gdabJU6phiBYTMUHC4KxwQ,14025 +resources/aws/security/certificate-authority.png,sha256=mpUoQA6lUDcCqBKMUa3i68oWnwyZ0j6m_SoZJ7ZwDBM,10532 +resources/aws/security/certificate-manager.png,sha256=S0U5imprcZ7dLp0I5bF6xCSumfWzj-R998u6ri4Ozcg,9750 +resources/aws/security/cloud-directory.png,sha256=Z_0-ZjGTaKX76OOirkjOJm28t4Er8LdtrQqJLygYfFw,14187 +resources/aws/security/cloudhsm.png,sha256=Ycwz8jwrnNggCNNpxCk2PQpmllZy-uEtn-laCikO5_4,14428 +resources/aws/security/cognito.png,sha256=k9eH--tabD7_PwMH69CBHt7NLbPW5Dx6AB-YzwLNN18,13257 
+resources/aws/security/detective.png,sha256=HuzoerTZno_nn1joByAGjQcxdeY2bG_Hv6MrZLHh454,23415 +resources/aws/security/directory-service.png,sha256=D7i3yAXgb7ZpDMQKRUSsOnMUGLP5_aVTVFSwEv4duBM,10953 +resources/aws/security/firewall-manager.png,sha256=vGVmJr2vy4PnJ7S4S8JOuiAYBgTVaw7oesum9jR7rRk,19005 +resources/aws/security/guardduty.png,sha256=Bp1l9Gulo4VIqmaMUmjBK0EUBXoE2esRRSbmTeY6Fms,17079 +resources/aws/security/identity-and-access-management-iam-access-analyzer.png,sha256=inZ1pppBpKR_g5nZ-x4XYL4yv4c5hus5pFlNHelzleU,650 +resources/aws/security/identity-and-access-management-iam-add-on.png,sha256=W9TAoZyySp1_uwjEQPRaPLvdPG0sqLPjc3WigBqYwQQ,12961 +resources/aws/security/identity-and-access-management-iam-aws-sts-alternate.png,sha256=6sVljTuxNQkYVls1WFEgUcZKmxH1zIpSIP3ImbuEFnA,12686 +resources/aws/security/identity-and-access-management-iam-aws-sts.png,sha256=jGyNoJ6sdLNzxCd8eedbPUqBQ_4gV0CEGK2irJiB77o,3605 +resources/aws/security/identity-and-access-management-iam-data-encryption-key.png,sha256=ZPckwt0ngUZ-ZSeqgVq3nqOpFW5UpQinb3iiCcvzxZ0,12660 +resources/aws/security/identity-and-access-management-iam-encrypted-data.png,sha256=sm5AZ08jc_Ju9xu8Jt16HFldwyn44Lnj-1IuSkI3JKA,7636 +resources/aws/security/identity-and-access-management-iam-long-term-security-credential.png,sha256=BRfbMX8t2TaYVWqbhlzpJWcmZE4f2xv81H47t1UljlE,19764 +resources/aws/security/identity-and-access-management-iam-mfa-token.png,sha256=z0ciCEc7Ru1Ivj_tm_pYhpKWKdTbrRQ6g_8FgeM81ao,16714 +resources/aws/security/identity-and-access-management-iam-permissions.png,sha256=fvOAurWBVBRt3l381uaeGS0JWPMM-mGGsKQSaF2bctI,2174 +resources/aws/security/identity-and-access-management-iam-role.png,sha256=s4qvAIO4iEzaoM_wYU8KBn1A8bxKgw8t0ideEmmZ4AA,4143 +resources/aws/security/identity-and-access-management-iam-temporary-security-credential.png,sha256=z62_hDjXsnvUes4k-93Xc1erAbNq6iYXJHd5gWX4zM0,25217 +resources/aws/security/identity-and-access-management-iam.png,sha256=-bn1--BiqCo6Dv9Ju6IJVhsRgVyR2YBldszHol56i7c,9808 +resources/aws/security/inspector-agent.png,sha256=WDpFjXbCUkns9V0B6jgRqG64oN6QEsUXxdl23W4Q0tQ,28448 +resources/aws/security/inspector.png,sha256=Qj2u1kpZz6iIZpWaIUMOnvSXjB6WO7OEyywLZYPw1l4,18580 +resources/aws/security/key-management-service.png,sha256=ltV4pvsr1ugUtQKPXHwNbDoPiCFzZmdWKxmiZud77AE,13937 +resources/aws/security/macie.png,sha256=NrAmBWL-rC36e5sRy7nSqYFO-E5aSumyQyoLVS2OkaU,14783 +resources/aws/security/managed-microsoft-ad.png,sha256=r51Hpf3Iwx7nxm58DurWHVM7nBTX5a9z4ofmNNkojv8,14990 +resources/aws/security/resource-access-manager.png,sha256=_JqN8X4WSF-Lp9iz-5iobMrMbILdJWd2VEHsJnwt5LY,14639 +resources/aws/security/secrets-manager.png,sha256=4tkWluD8dhm_wRSj5tECskeh80XrAEYn6f0D7EjeIRw,15057 +resources/aws/security/security-hub-finding.png,sha256=oWqXsfn6O7qF5Xn9luiIsJaSFuDdC9MSK-0-aRvtSTw,38471 +resources/aws/security/security-hub.png,sha256=7qBQx3TtC3lZ7ecZl5jY2tPK-vuTY7N5uL7ZaJ1ug_s,18191 +resources/aws/security/security-identity-and-compliance.png,sha256=mz7aYe2sqHAYf8M_HHaopVWxuLTtH_h6pssNrC2rXcE,17784 +resources/aws/security/shield-advanced.png,sha256=2jHyBoU6nrMtijyprwz3IZa2fLXdBbibckUKUg7wAoY,19861 +resources/aws/security/shield.png,sha256=jd_DnIBewTyLLGotfbpDkFiFJTsXZu0NcPreCw7Dqjo,15392 +resources/aws/security/simple-ad.png,sha256=juR5p7FQP1_ZKI9SSLK52J-YwCL-oytZJTmwkCo2tR4,12495 +resources/aws/security/single-sign-on.png,sha256=o55JJ_wGoBUM5eeSjVNAzwy63ydBw-DwiMMYCa1_Yns,14951 +resources/aws/security/waf-filtering-rule.png,sha256=VENCbVtgoAzSRjTBbK74gT-o43jpZM0i7Dfd4GsJPKA,22610 
+resources/aws/security/waf.png,sha256=7Tf2ONt6pbtJa1lF5gZ6A6apCEhO1zXhjklQabfNXj4,17363 +resources/aws/storage/backup.png,sha256=TZhmrrdO2JhPDwakh98ivebwgKlQk3MX2-WCzZIXQZM,17573 +resources/aws/storage/cloudendure-disaster-recovery.png,sha256=eXQVc8iaqEgVA_d4DB1zEmQRMKV5nZxoDW8fvUrlWTI,29886 +resources/aws/storage/efs-infrequentaccess-primary-bg.png,sha256=CVHlaZSllDbyy9BCkX9YoxVU3hGQLqbpa2HmZzAhEos,15600 +resources/aws/storage/efs-standard-primary-bg.png,sha256=uQVOnDPITkDbIGHM8BH5pv0xlK7bOVAIEjPbV_HyrcA,15604 +resources/aws/storage/elastic-block-store-ebs-snapshot.png,sha256=1EQCPwHEl4Tu64tSyNKEOmzk2NUyZ_iHzfLdjPwWuBY,16414 +resources/aws/storage/elastic-block-store-ebs-volume.png,sha256=mZhFDmplGF1aAK8x0A2CZPQVqoWLYwyID22i3D1sGw0,4494 +resources/aws/storage/elastic-block-store-ebs.png,sha256=BoQibrb1Pr68ax2rY8qDfZY-efPjSGcQ4VKEepgWp_s,10478 +resources/aws/storage/elastic-file-system-efs-file-system.png,sha256=q8kBRTPtUKMTUDJvAPhiyJ0PySQsn7S_OLROVgWHaI4,10432 +resources/aws/storage/elastic-file-system-efs.png,sha256=LKo6iTLdXIgNGteUdKG8Zhm1B6MV338XRfWL3f7W5co,13360 +resources/aws/storage/fsx-for-lustre.png,sha256=iDAeSFBIMR6fDnIKlkT0DEx6FrdPNBp1i3pURDY4ohI,12113 +resources/aws/storage/fsx-for-windows-file-server.png,sha256=oAcVVqVCyYKZ4CEe_sB316VYB6GkJqQFBgJJscuIz1Q,10710 +resources/aws/storage/fsx.png,sha256=3mGTHrlq--3zIX6pV1JPJ-CdsNrfcXmaFZjQFM7eOJc,11596 +resources/aws/storage/multiple-volumes-resource.png,sha256=mtn01RxLoCRFqAm3GDrZ4RoHVcZFRJinroPRq5gUmzU,6047 +resources/aws/storage/s3-access-points.png,sha256=oalnMBiRpyEVVBNmPhtwWbbeYUVnBTCR50JYXt2vOQY,12007 +resources/aws/storage/s3-glacier-archive.png,sha256=f_DsBX5nEGlZuGWZlnIoJo_M-YoG-8qLSL_2oBG8h6w,5053 +resources/aws/storage/s3-glacier-vault.png,sha256=VQ41AuLh5KF-m4YbD1VIhcgND8kUd81LjqaYWKpkD9I,13287 +resources/aws/storage/s3-glacier.png,sha256=8Nta9bbWK1OqEvVr0Y7qgXjXwv8mR1MwezzSxhzwqbY,16802 +resources/aws/storage/s3-object-lambda-access-points.png,sha256=WRypGLAJcrLsNfSEkS-NGvB8dzy1IZWPy3E8Cj9nfGo,17761 +resources/aws/storage/simple-storage-service-s3-bucket-with-objects.png,sha256=x5B-ESj_0V5UONRcx3ey_vxMisYzvW7V1mC5ZYYCDFw,24661 +resources/aws/storage/simple-storage-service-s3-bucket.png,sha256=cENVmEzYyMpYtavMEhqliwQZ4H2yu3uwtoBtAsewbYo,19585 +resources/aws/storage/simple-storage-service-s3-object.png,sha256=HRBgJhsZ2OAX0hJzT-GYhgfU_lqycf3Qebeti3gbU9s,13075 +resources/aws/storage/simple-storage-service-s3.png,sha256=ZW_epgm_aN9t0DLVbgvu2_R7JgUftSOKpNhcnFLTP2I,14475 +resources/aws/storage/snow-family-snowball-import-export.png,sha256=_1EUqrYZLY7_4Bu9rGkCYN_c1HlQ_o6OhUHEGLUtbiA,12107 +resources/aws/storage/snowball-edge.png,sha256=OmqhG5JJ660Brsv4JaohrKxOvKKxo3Kom8oj6Ct7dCg,14480 +resources/aws/storage/snowball.png,sha256=FwxDE0q4NrdkUmROvXMKHdiBK5Wc5QPcEMDx2eodjp4,9813 +resources/aws/storage/snowmobile.png,sha256=HhKimzGs9EgOQYdnc9mAWRhNlN5YDPHUdWcPRrOL13Q,14987 +resources/aws/storage/storage-gateway-cached-volume.png,sha256=LIRIE3zUwAH6oH-CoPpjCeeNBRuPnvzzTdovz4icwWE,6900 +resources/aws/storage/storage-gateway-non-cached-volume.png,sha256=SlCZENR6DpV4IE7_zXEPipFVq7gYmrh8fz_zAseQ0QA,1985 +resources/aws/storage/storage-gateway-virtual-tape-library.png,sha256=IjUmMXGZotZBPbTJVjj1XtrInIrwiuUpYmJCkfpN4Lo,11258 +resources/aws/storage/storage-gateway.png,sha256=vT95GQMsSlJ4pFOaO5IuJdtiQKNOvnZ9jpdtHHxkdw4,14042 +resources/aws/storage/storage.png,sha256=mNs44VdzzUvatfP8N6TRsFLT7Ylg2xB8MPtTqpvtMc8,10908 +resources/azure/analytics/analysis-services.png,sha256=5pJhvxYrnHuL2VnEVLsOiBAp7zGiqYspk_SpQt2bQ6A,9356 
+resources/azure/analytics/data-explorer-clusters.png,sha256=tQVh4LT8duloxdIcl8yRZyKVG10U_OZJ16A8TPw4MN0,6155 +resources/azure/analytics/data-factories.png,sha256=rEbGZ4hJT76sAu8xCa6_IK_zNOvdJgkBLFSjZ5qAAyE,5774 +resources/azure/analytics/data-lake-analytics.png,sha256=CzVjfJv2Mzliq4svOaJE1k9ulb0GENkKup9yhrSq8Ko,7486 +resources/azure/analytics/data-lake-store-gen1.png,sha256=1j5fT9HaxBzHcdpebc5Vnstz6XiSp_Izn4m6hcONxrc,7068 +resources/azure/analytics/databricks.png,sha256=PptD1YPuh82w3UrKQt1jPoyWOSGTKYhZeBpvHjvZ_fs,12142 +resources/azure/analytics/event-hub-clusters.png,sha256=Wa5WQAtJGNZMdXdsgQnck-RYO4r8PhRBY0d-_v703iI,4471 +resources/azure/analytics/event-hubs.png,sha256=XNjg_kGrdu5poy-Hz8ALvnd3wIwUQNPfl8t5p4N6d0Y,3379 +resources/azure/analytics/hdinsightclusters.png,sha256=F6EDNzwFp3qhzU8KWwnbkacyrxKaofWl5TBYZB4mvT8,12491 +resources/azure/analytics/log-analytics-workspaces.png,sha256=-_ONBNSA20FjT9vHmgQNVclJctB1xIzGZySX_MtHDi8,5585 +resources/azure/analytics/stream-analytics-jobs.png,sha256=aRyd_4OKgKSxX6nAojMsKSVBqdXLqqZpby4cCU1SKLA,12935 +resources/azure/analytics/synapse-analytics.png,sha256=2m-kLaJpvJwpDFcJNaj2t8PBv4HA7MP977YY_Udfc3U,12623 +resources/azure/azure.png,sha256=90ovg19t63YN3gbK36B8cAQDUSjLRluDzaLbcszANps,29822 +resources/azure/compute/app-services.png,sha256=VJ8Y4RGQJ1lBQnuC3j2nlqxLPxrNnuHBfvI7bOJwaN0,29974 +resources/azure/compute/automanaged-vm.png,sha256=KkQCyQjksyXMMSoW5x28XSNPyl9VQM6nu0mmM4v8FWE,11060 +resources/azure/compute/availability-sets.png,sha256=-muEubSGR9Tjb_MmdSPH5-hiGgIWcUyTe7M4PzYioCM,6376 +resources/azure/compute/batch-accounts.png,sha256=YT61JkgCBKVPeCsDySFTTYIEbjgxmG_eq0r4FTvNRSo,5459 +resources/azure/compute/citrix-virtual-desktops-essentials.png,sha256=jAuzdpjd4wWg5DCFIcKSHiS2TxJcEOOwd5Gg3wwEoS4,25608 +resources/azure/compute/cloud-services-classic.png,sha256=3wVb_D7ogJakGQcAX6v8UROwOwvIyZw7hSfpVW_4enk,11663 +resources/azure/compute/cloud-services.png,sha256=-cE6E3D3WTP_6xn19xoPi5Gcu5WswH1e6nrgOUZH8RQ,13459 +resources/azure/compute/cloudsimple-virtual-machines.png,sha256=42wEuY8ussem4Tga4YFzDo4O1BK0Onby1VcnB0_3mhM,8570 +resources/azure/compute/container-apps.png,sha256=OyHWRELmtCMy8YTtlAjK0a3MoNB093QlrjaN14NrQm8,38538 +resources/azure/compute/container-instances.png,sha256=qNELrNQlDF4_qWXN0jfNaUPud9ID1-SzcoTxt_JxAyw,8390 +resources/azure/compute/container-registries.png,sha256=QH8LvaDSfgM8XjXxSTx426oOb43SOZEjR5ENgy3i_50,13609 +resources/azure/compute/disk-encryption-sets.png,sha256=5tLD7z5_0c1r2lvqbDmxDziq7Ce3fu1RyoaWoTdy4RY,14116 +resources/azure/compute/disk-snapshots.png,sha256=qCF5XBLtx-ti-OlR0Lp-kUHkr1J-LQrR2a0ErA3ioXw,9892 +resources/azure/compute/disks.png,sha256=P5brSv9NvbCvZtnMBXy1IgJjiaTVLayxsKVYPXVT1-4,14206 +resources/azure/compute/function-apps.png,sha256=zuFgiJ_4iFbSR6VuqtbsMGO4mM7KhbbN4Dv8CHhb2G4,9548 +resources/azure/compute/image-definitions.png,sha256=twEusAQHHQ8lOzBDxiJ0K9PEkz-m_KzuheotSbUWScA,7331 +resources/azure/compute/image-versions.png,sha256=KHBByFrhS-njLrLQi-eRU0kH962TUwAyE8NIE54sBzw,7500 +resources/azure/compute/kubernetes-services.png,sha256=Fq76Y8A2m7IHiSkcZR9-GVHUikrHdgzyC3FsqFnZe4E,11569 +resources/azure/compute/mesh-applications.png,sha256=qZkFLcxL1s7WX24NwQYQ_LFMXpjUjWj-NCgLuge7eLY,10785 +resources/azure/compute/os-images.png,sha256=mFUwuacKKlZP3QaMiRLGtKqF-BMgyKbFwJ_2ioNL944,7360 +resources/azure/compute/sap-hana-on-azure.png,sha256=MhNJfhgVimeIq1mflN08JRp8CQhptvf2HPbgvddHVh0,7417 +resources/azure/compute/service-fabric-clusters.png,sha256=vAR7rHttdCbos21I_qfcGbH7JscUcgdSFA1YHgGdFF0,10738 
+resources/azure/compute/shared-image-galleries.png,sha256=XCVSB5kbRmpDS_njzoyT9llNKd_L-CqH5eKjhKXyFqg,4895 +resources/azure/compute/spring-cloud.png,sha256=c-8rcU-bVGs37KBL1l589zOe6O91UIndquSNKcxtcUI,15110 +resources/azure/compute/vm-classic.png,sha256=G_FORt6Ud19sZh8E67DfNl7vlBowdaCqJ1OtrpnerWU,5771 +resources/azure/compute/vm-images.png,sha256=mFUwuacKKlZP3QaMiRLGtKqF-BMgyKbFwJ_2ioNL944,7360 +resources/azure/compute/vm-linux.png,sha256=80yjDIKaZh4LNhWpzjaLGjE0pqqw5HbSgyIH703i8qg,8899 +resources/azure/compute/vm-scale-set.png,sha256=qoNqa7t0CiIAU5TRVIkz81owLtOSE_P2uc0cO8yRkSs,6721 +resources/azure/compute/vm-windows.png,sha256=kgXN_AwAU_bXmhY4ZQDZK5lqAVrfNjtXeWrp81H29o0,4377 +resources/azure/compute/vm.png,sha256=5D9ocv1-Tx6ii2qG9RUuSRtsiBCA2KJXnxNuOQqZRUc,6048 +resources/azure/compute/workspaces.png,sha256=C81UcMlah-C4r-G6pKa-zz_YENFGz7RAvA7WrtPSXfQ,5044 +resources/azure/database/blob-storage.png,sha256=gd8aOoG_yzzqSbqvPWJi3S6JaM3_uSPvZxvtq8SDonk,3158 +resources/azure/database/cache-for-redis.png,sha256=SdaA3tS1QPzKhYk8x86J6T2BShZyNqaq7awXEywZHTc,7922 +resources/azure/database/cosmos-db.png,sha256=Kbqc5oDB2aX3B8nf2Kfa8wTuGJb7o_3l6jz6yimeDdE,20772 +resources/azure/database/data-explorer-clusters.png,sha256=eXVoevQyPZ2kinpcW1e6JEk4ErvkWln4uUK-I6x5TbA,8194 +resources/azure/database/data-factory.png,sha256=WWl3P1eV-fHn_kPMoINQu04C2wOEf52KchkbsvgeX_Y,5520 +resources/azure/database/data-lake.png,sha256=xPsPbR9nyBnju3NzmkUj2nJ_BYJvTT3TevxEyjPQ5js,8797 +resources/azure/database/database-for-mariadb-servers.png,sha256=QEWaoJnT25F2tbd9TzlFeD05-CH8K_9aVhYjtpIHktM,10848 +resources/azure/database/database-for-mysql-servers.png,sha256=HybGmdL2aF1ZHxtHV1gE4UM8RKhdgpJ5fIKXqOOUGb4,9268 +resources/azure/database/database-for-postgresql-servers.png,sha256=q1MuVNGOtr2VLqGO76gL0MEoiMlL_eDLLFULqdX52i4,14000 +resources/azure/database/elastic-database-pools.png,sha256=1enHkko2IYoTvzm7P9RlVm6UaDXU0DTxBhtwe5pbu60,8151 +resources/azure/database/elastic-job-agents.png,sha256=ddSq0NO1kaiu0pvYVUUMWEhlv6ERql0900jEBwNCOOA,12166 +resources/azure/database/instance-pools.png,sha256=C93mHDXYpW5ohFS58LYz6XvHUDvRjeTGQ_gAgDyM-To,10883 +resources/azure/database/managed-databases.png,sha256=hdGXt_iT5Nr4ndTqVq7Y-V2IC5MZoSfy951CxRoY4I4,10561 +resources/azure/database/sql-databases.png,sha256=sFnwm2yXBxn4QWlaxk0u6xlqRUOqJ-5oRl5m4nulXtM,11460 +resources/azure/database/sql-datawarehouse.png,sha256=UrnClOHFGnG067iXCjpWGeCYGHFyfdXK474_spttfyE,7035 +resources/azure/database/sql-managed-instances.png,sha256=rKH0YDVbubc3kDRS_DBT654sIVDKllhDKJq3ujLDEqg,11550 +resources/azure/database/sql-server-stretch-databases.png,sha256=UrnClOHFGnG067iXCjpWGeCYGHFyfdXK474_spttfyE,7035 +resources/azure/database/sql-servers.png,sha256=vuZW9ROAx-b-tQ2B7L97kI2LlnT3AXwo6qi07pV1VYE,13789 +resources/azure/database/sql-vm.png,sha256=tlbv36-qCWHHDkFXZeKcdFo3Fngz6SwNGs3ieKms5KA,7868 +resources/azure/database/sql.png,sha256=X3qFqoAEn2GwDIx2ppsf9U1xMLPW8KOZetlljXLyHAM,13838 +resources/azure/database/ssis-lift-and-shift-ir.png,sha256=CmBJ3PBkfZfUcUYEXBP7XiX9vI5r6s7Ph--SVxdgIEg,11662 +resources/azure/database/synapse-analytics.png,sha256=2m-kLaJpvJwpDFcJNaj2t8PBv4HA7MP977YY_Udfc3U,12623 +resources/azure/database/virtual-clusters.png,sha256=1Q-tLraSU_g9wzb9NAmNDzrQC_s91s5Ux_ulVsN1h4A,9939 +resources/azure/database/virtual-datacenter.png,sha256=bxkzuWpgpPwFFQwkJ0l70l3RueRN8ksxABCqhkM42uM,10417 +resources/azure/devops/application-insights.png,sha256=gGOjgs-T4GZrRTsx5gPlyadPkhcRPrbjQKQWXWf53zs,20264 
+resources/azure/devops/artifacts.png,sha256=UhMeoeEYUviHFgRxDBv1wF_gUef8NMq1tqVXprNAkm8,3671 +resources/azure/devops/boards.png,sha256=O1nlMHwH4voAvLMYal3wcOuXciKizMu8fbg2iBENHRE,6145 +resources/azure/devops/devops.png,sha256=KMNNP-xTLstOg5nFEsq6uPBBgNOcE4Cbiepuy9Hrhb8,6907 +resources/azure/devops/devtest-labs.png,sha256=pDl-kUZthFoCFmLThrKTDlkBKFlK022KPfG5AECmGxY,9112 +resources/azure/devops/lab-services.png,sha256=7T0tv1FEmWnWEHhQlv2tfHNT7aApAqzErwMytBexx78,9094 +resources/azure/devops/pipelines.png,sha256=LlkjuuaBXtNAnWED8YPeqVKXE_ybnQkb88sKw-Wfv0M,7157 +resources/azure/devops/repos.png,sha256=6Flsxu2GenENRzhK5nA-uHu7osfgKjD3gX8y_cS7kIA,8158 +resources/azure/devops/test-plans.png,sha256=qo4lQO9zNutvQnfaL1in3cCXaUA6jNrYLtfVHGwWHnQ,8518 +resources/azure/general/allresources.png,sha256=uETsGCuOoVqc0R_OCOxq9i81YZ1n-DH_7R7kPbDI-fY,2571 +resources/azure/general/azurehome.png,sha256=LGUTjeQnzcJW8Jert5vIK3HBoL-Vi3EnFtg31RdvwHM,11331 +resources/azure/general/developertools.png,sha256=WGhgRy_zMvuMtCjbMn0g-BJJE7So6Q2UVXs-ep4-j5k,24253 +resources/azure/general/helpsupport.png,sha256=fc4GTutTVDuFljmW4tCmYeZ9U5FWmZ2NiXtRO5zi7Mg,12305 +resources/azure/general/information.png,sha256=7SvHrFKu7VFD9SPMcbD0-Q9CvPBT_BKaBDM9PWycGNk,7602 +resources/azure/general/managementgroups.png,sha256=R-JbUAkgno-cUQ25eqECX0gkFz4fzpF7rVxMj3B-MdI,13041 +resources/azure/general/marketplace.png,sha256=SECLNO7ilXetjuCw7-PzJTki_oFEP9xb8OzKldlSC0s,7919 +resources/azure/general/quickstartcenter.png,sha256=0tFp88SD0UjuCBzbNeenmd2PiZxIW10zPd832-oQrBE,15259 +resources/azure/general/recent.png,sha256=HJAQ5zy5kCLZ3gWpfaBSTkerVuQ4mUf37vRRnwlUat8,18998 +resources/azure/general/reservations.png,sha256=bNdHc6srHgVsrmoHYN16xFtsDUyhRwOoqiRDHj7WCyg,15480 +resources/azure/general/resource.png,sha256=z7_HVFVJxEhytSUDIAog_v1jegBh4BcDPeaDebreSqI,13813 +resources/azure/general/resourcegroups.png,sha256=kFZisKjmGior-2cLWs8phRyopgFDVuCgiHpbaM3rGZc,10888 +resources/azure/general/servicehealth.png,sha256=B13sJ9jQF05vaITEze279uEw3kONCjBvEUEc6lqmeCs,18795 +resources/azure/general/shareddashboard.png,sha256=4Br2FOs83H8HpPRVZRtQfglbzrZz8brf9jlL_WnKdgo,4494 +resources/azure/general/subscriptions.png,sha256=gJGrvW6nCFnhvzV80JKk0hHzsaYa_v-1mj3KZuZ2eTo,9843 +resources/azure/general/support.png,sha256=3ljf7D5Vl6u8ihcgafC2I3Ymx0-rCS4zphES556W4iM,12949 +resources/azure/general/supportrequests.png,sha256=JhwTCLGXLgj7qDEK1zIacQ1jWrurKy1epMo5zjbGw8U,23928 +resources/azure/general/tag.png,sha256=E-oHhyKMfJf65yVy_fZyYQqqJ70ryDurFUTZ-AdBK7Q,8295 +resources/azure/general/tags.png,sha256=wdAWFnEK0x54Y0IcUXfKHQO7kRnRXq6UMqSzPF22vvM,15696 +resources/azure/general/templates.png,sha256=9S-niuUlwHfBC3N53ptZCTixgerPIts8Ag21vYU6Ilk,8830 +resources/azure/general/twousericon.png,sha256=Z5vl1_H4Xx-NdzDY9EUOAw8PKZMNG6mh-SXPeXFl-3Q,15439 +resources/azure/general/userhealthicon.png,sha256=4AS0L19tr6LpItOr2XdhKvMaedRHWwfGrR5vEeDDbDk,14868 +resources/azure/general/usericon.png,sha256=cOZMlIm65SNvXRnQtO8Op5qWTyVuqmk4AuBAKcf6DW4,6950 +resources/azure/general/userprivacy.png,sha256=6fHk8tIiw2LxLmzxRh8cQ0nA56btFE1gvzm9G-24suM,11242 +resources/azure/general/userresource.png,sha256=lDRym0wkhiI6rsKM9-KQPIMMXu1zWwHYx1JYtT4szTQ,14741 +resources/azure/general/whatsnew.png,sha256=yeS34y7vPlD4Zy8q3tKSVMGm1LrqmDgOnGw9iUYfsfw,11376 +resources/azure/identity/access-review.png,sha256=66d2DDcSB_YY90tZYpDJ9LxJfLu4zfLSCB40jGZ1xxQ,9193 +resources/azure/identity/active-directory-connect-health.png,sha256=ATRNa77Ngm6VfXcNiQSg2sTRU4B-ZbyD1xkwEAHvTHA,15363 
+resources/azure/identity/active-directory.png,sha256=qLYFidOE3llc-i7HeXmR4KTtgAMDnI_B0fg6w6U2_Fc,13412 +resources/azure/identity/ad-b2c.png,sha256=VUYueiscD3G4wdHl0ksrKnsgsUNbOG5YYwxMaFtRDkU,14505 +resources/azure/identity/ad-domain-services.png,sha256=Z-tpGwoO9L9p80MJ_ny0WJvpQ_x-XzHQe6gPL7iUGE0,13659 +resources/azure/identity/ad-identity-protection.png,sha256=ZI4HUzxUA5OVDzMXgV3UP6hbOxpcG1Ft5Zip0KAcugA,10717 +resources/azure/identity/ad-privileged-identity-management.png,sha256=taVg4EtyJ6HrV4CF7RBj7WJfFsK_-jDbmECviPsR4TY,15853 +resources/azure/identity/app-registrations.png,sha256=RwkuaQVi1oL3GZGuK_VeailCf2QCPT03lR5c-AGDZZk,7818 +resources/azure/identity/conditional-access.png,sha256=fmA2l6cyNVDwNGVveTKafbb701cbu35JwF3SOwbk-4w,6880 +resources/azure/identity/enterprise-applications.png,sha256=tOmeInow1R7Uq6DZ_LreU-5R1EfGteem2R3Fv_FgqqQ,10025 +resources/azure/identity/groups.png,sha256=EN_MXZA7N8gPpzQ3JbQokaME9Vyw8d4FK-4C110SeUE,11200 +resources/azure/identity/identity-governance.png,sha256=IkoBmFkA_1S2OzdCHh3g7LW24MtyKP8p69cg9xgCuHE,17700 +resources/azure/identity/information-protection.png,sha256=JO_DbfOmyynRaO-AJFEBi6_W29S46PtuQ_9qhthEYw4,7373 +resources/azure/identity/managed-identities.png,sha256=duPjKNRGWmqM0y0KePsCeqRFTuDKaZRakr85jUiqY-o,14475 +resources/azure/identity/users.png,sha256=fqF7pqm04eln-zCAU4APZ1ScuEU-D5V_f8_MAT5RJWo,9934 +resources/azure/integration/api-for-fhir.png,sha256=w-U7A4W1eiR6o80MjERNvDRAheeRF4NbZNADj70lOu8,13245 +resources/azure/integration/api-management.png,sha256=PzzrnJ5zbzUKDjRw4YH-2rdmKuMfwU1R9uNI2knBtwI,11297 +resources/azure/integration/app-configuration.png,sha256=1WfV64aOaIuQJjYgg_b10rT4Fx9TnlmMV4hQqSrSc6Y,12654 +resources/azure/integration/data-catalog.png,sha256=_nppCrIgYXlscZ0Z9zutfTi55ve6YkjlDS8dNKPbXWU,5838 +resources/azure/integration/event-grid-domains.png,sha256=kkUWFBBYoNOVQaM-PbPz8k_FBcXMjUyoF0IcYGcc5KA,6200 +resources/azure/integration/event-grid-subscriptions.png,sha256=kkUWFBBYoNOVQaM-PbPz8k_FBcXMjUyoF0IcYGcc5KA,6200 +resources/azure/integration/event-grid-topics.png,sha256=vuNhn3yOtDmWYfrmmurP_jO7zmoGRtq03qQYsOhBaIY,6378 +resources/azure/integration/integration-accounts.png,sha256=raAhdYNBGSs7hSLVTb1ZvXOMA9Md6QaEdw2D8UIUZIk,2479 +resources/azure/integration/integration-service-environments.png,sha256=PRbtzkgpU8EoZ2CLcoTLlFDbRC6CmGiPDrCcDE62fXE,13042 +resources/azure/integration/logic-apps-custom-connector.png,sha256=4S5QnTwmpYKdgcdZY6luHpfOVja-0cDO81fZxN0g4ew,2215 +resources/azure/integration/logic-apps.png,sha256=_msS0m1lubdxHYWI4xFdz2OfrhA1faUtcRdYsjDOVPo,5793 +resources/azure/integration/partner-topic.png,sha256=8O9A7iuvQZSLl7FnmIeKnpFYC_o1E4QQM4mz1byDAkM,7101 +resources/azure/integration/sendgrid-accounts.png,sha256=K_S6CktDkNYrb8nR1fyGI6QnXN7Jo2AkR44xwWFxaN0,1580 +resources/azure/integration/service-bus-relays.png,sha256=29DUbFusyVnMVE8JurBTx1hTgN58xOGDx70s3ev3Iyc,5995 +resources/azure/integration/service-bus.png,sha256=kcLZ4pWqC02Vy1cqAwYRVotf83FkmT4vVabRTSde4PI,2859 +resources/azure/integration/service-catalog-managed-application-definitions.png,sha256=J1bBchQ2Whp4uvZTsT3LjYfbejae4cXSwOtUe_6UIgM,14632 +resources/azure/integration/software-as-a-service.png,sha256=IVA7N0zJewpOPSsvt9FAHdgFWjHDXRpoVaiI7GojedM,6822 +resources/azure/integration/storsimple-device-managers.png,sha256=CcPLu9U7uKRxHgXmhPZAdhKYqWMWebM8P0XtUYU3Dbs,6151 +resources/azure/integration/system-topic.png,sha256=j7fwDaiYGT92Ilzr-CSvWvJBSjm9_vaufbTUVH8YG60,6238 
+resources/azure/iot/device-provisioning-services.png,sha256=y3ZORYIn7MUaXFA69bLxv1knvfAQRACh6nFJeeKmqHg,17317 +resources/azure/iot/digital-twins.png,sha256=45THMd2wMLKwivfRQ8W9WKv5dsnTrWPDSa_yaW5SWY4,9070 +resources/azure/iot/iot-central-applications.png,sha256=_whVQXNbBMgOCwdstrdkRIhHZoUiEEuF1fW8aCW-CHk,11595 +resources/azure/iot/iot-hub-security.png,sha256=dlECp38hkfyElNdlmVV7WdWISypjpDjBWpv6ih3wd6U,10387 +resources/azure/iot/iot-hub.png,sha256=fTdmhYldKMVJcLvNqFUwGvkrs5O-fWXUUK3jSyemqSk,10263 +resources/azure/iot/maps.png,sha256=iguRHrLCfxxuu-xFh0vSAsXflLjOtfpY80d7oc7P9Pw,11013 +resources/azure/iot/sphere.png,sha256=9J61O5P7WhC024BTeiQjr7uqLhgSL_G09LXiOG7p3G8,15765 +resources/azure/iot/time-series-insights-environments.png,sha256=U2RJ1CzVw_AoWpZo6iYilP8toFDdCY0pqEf4ozQ9C6k,11427 +resources/azure/iot/time-series-insights-events-sources.png,sha256=IgjDnsQP4N6aIl5EOTAXcQFd2gbORW8ku71fE2IG0zY,4421 +resources/azure/iot/windows-10-iot-core-services.png,sha256=lQ3qmzVMCfPbStQPn_hlCSa6KvfmqAKje0fAHHAGsko,10721 +resources/azure/migration/data-box-edge.png,sha256=MNAY0RoG9f-tI6AgzCqpcns6uehY5UsKZXiYrkgyTFY,7603 +resources/azure/migration/data-box.png,sha256=2YLLRL6qtMht5Zhwi3XVupLkDutT_VgFNC8DKMoDxoU,7230 +resources/azure/migration/database-migration-services.png,sha256=9DcXxxsCMdni0e_dZt1T1cP6Rc9FMgC0X6jOKH6Bo5c,10237 +resources/azure/migration/migration-projects.png,sha256=G1To-gY5jzRtgEwhyjsBp5qOUii2FBRqAPkfjE1A9fU,9457 +resources/azure/migration/recovery-services-vaults.png,sha256=91qHkDgKOXcgVlEcuYRTO3kw3thafJHkpCkmvgdgjeg,8751 +resources/azure/ml/azure-open-ai.png,sha256=T_5KfX2Nww4NsuIJFOVXRK4K3QbL8c4xe76afQpH-MY,52114 +resources/azure/ml/azure-speed-to-text.png,sha256=1Z3zUtU9pxEQNFH-mPcFgmZBycViSbVNlyJMcE04oa0,8017 +resources/azure/ml/batch-ai.png,sha256=7FICw1usjEbRAK9U0RVnRu4mtbd_84W_BSMQ5u0l-eY,10934 +resources/azure/ml/bot-services.png,sha256=9T2i6k0eQIkyLrWuufSdrvo0uEVeOiDFSBncq8ApyU0,17041 +resources/azure/ml/cognitive-services.png,sha256=PsQwgH6xaMJFQvnQ80oN6esg15BFwkgpYoRq9NRcbVM,11147 +resources/azure/ml/genomics-accounts.png,sha256=r9k-7zEJ-D7ULvEJyUakaKfvHxEmQ8au8TqFWkCMLyE,10113 +resources/azure/ml/machine-learning-service-workspaces.png,sha256=SyA2lPhkQ1rYk0SfwnTVKr4MKXavE3vvc-PvmGYODIo,7586 +resources/azure/ml/machine-learning-studio-web-service-plans.png,sha256=uxZUMe7TvU4zH1F5b4PtuOsDXHxLFDiYLkSlJdy1SiM,14379 +resources/azure/ml/machine-learning-studio-web-services.png,sha256=R9PUl8g1RppIzVD3V6LMi-drOlcYR9nnd1EHZjIERzs,14114 +resources/azure/ml/machine-learning-studio-workspaces.png,sha256=PPG-rK8Y4vTQju0wehZ2XBqyJUZoAAS4LcyU9oc4pd8,5681 +resources/azure/mobile/app-service-mobile.png,sha256=BNZWxfnPGzD2W9LEFlNHnySWW9iWuOBku7DUcQVJXZg,5355 +resources/azure/mobile/mobile-engagement.png,sha256=-e7hmqoi8y90Y-ab8Vl1P7CB1AhulhVHb0Cyp-opuXE,4128 +resources/azure/mobile/notification-hubs.png,sha256=vZpTyrmRFbmjv2PkxkyQ-nyvkvvBknznPqFG3zx_RBw,3665 +resources/azure/monitor/change-analysis.png,sha256=rzZ1WbzlWgy2BFUMoZnHbh8G7w8BCwPFAZU7m-2utrA,27424 +resources/azure/monitor/logs.png,sha256=2AM6SNrzQp6r5repzEpUjlrELHBFCJRsO44p7IZlehY,13976 +resources/azure/monitor/metrics.png,sha256=tQd30cKhpAEFoOGMD8a8rkbANmTlqUss_pr-4P86EVs,16548 +resources/azure/monitor/monitor.png,sha256=5qcTN2EfG54_Bl0uNv6GuULWIlnFJwdG2WafqSwzf74,32611 +resources/azure/network/application-gateway.png,sha256=2UE9P-1PJwJYa1cXfvgHWRWgU_nAqQ038NTw0DpGGDg,10455 +resources/azure/network/application-security-groups.png,sha256=2JNYvBgKZGbJ-ROPEvrkPMdAdfX8Es8UIbJyZ0kYV_A,8368 
+resources/azure/network/cdn-profiles.png,sha256=N24JwLLcAssdOMsLYoJdqS61ixZF6UHxjTdPTAA2GGw,4946 +resources/azure/network/connections.png,sha256=jHhhO6Gg0EZsllANX-eMiGkFVHxMpMsugzNOCFuQetk,20817 +resources/azure/network/ddos-protection-plans.png,sha256=oMF5Vnc_Ke-nKQIpyAYbBKoN_CCKqiQPwR9cKPQf2p4,9966 +resources/azure/network/dns-private-zones.png,sha256=405QIk-qYbSq5P8DS2yhPQ3_eAAnj5Q7VPIf_atN_3Y,24095 +resources/azure/network/dns-zones.png,sha256=WgVUs5HI-btO1LTT7qp-W12weEOiJ4hzCsNHS7-piO8,35620 +resources/azure/network/expressroute-circuits.png,sha256=NFklO4eAaHrsUZsp4NW-Cp27FJimhO9q0UoXA_m1usY,10073 +resources/azure/network/firewall.png,sha256=9pshTtENYb2TGoZt1WPoECQkUh0HdFu5m3tOSV2dyQk,6221 +resources/azure/network/front-doors.png,sha256=sBnrgsVyB-n-tOVLSQhUYtIFHHQ4fsyGGy_k4pxqrc0,6935 +resources/azure/network/load-balancers.png,sha256=Qi9LEGQzDP0DAbCApNeWAwJY83Q96E9dvwNMsJJ5_vw,9161 +resources/azure/network/local-network-gateways.png,sha256=Dwi7mUXycK3j-JmuJCtKabky5beqGUJDR_YBunxxkiw,7854 +resources/azure/network/network-interfaces.png,sha256=41XuVmi3h13M5O1fjwx9KSkJcsov2jWAHWgqGdx_yro,5483 +resources/azure/network/network-security-groups-classic.png,sha256=MpQRJD65U761GVSGiC95UgaA82TbZAke4ASB6DYovjw,8609 +resources/azure/network/network-watcher.png,sha256=zqf3sEdj1HrDHX_YTK2zjMN_y2u15eCTQWZyFjhsd4c,20242 +resources/azure/network/on-premises-data-gateways.png,sha256=uxT7CGdiYItynCL4CwxcprfqDndi3xwq5ci1otvrUAU,9968 +resources/azure/network/private-endpoint.png,sha256=ppNuj2tfJj6_sEfDKTCtOTMZtetItf0PRBL9uyMhaGM,14806 +resources/azure/network/public-ip-addresses.png,sha256=uV68XziC50lppgUkgloISfm1wpmtwFqT3Pc3SlUqaPo,3949 +resources/azure/network/reserved-ip-addresses-classic.png,sha256=R-W3QbCphBtWYbdn4eDdYIRzP3bbwJte-G2gnjWZpA4,3770 +resources/azure/network/route-filters.png,sha256=I5Xegez85R7ftIGiEM_54DecbMCxa4rObY8IYdlnuSs,4930 +resources/azure/network/route-tables.png,sha256=DbZp4Gd0sZmNbQ5TSsqa5amsOnUTK5u4B7NTMMLOaJU,9704 +resources/azure/network/service-endpoint-policies.png,sha256=S5vsDU6p0rhrztkysaYj8LQRGYpnF5zAW-apJkOOeRQ,8989 +resources/azure/network/subnets.png,sha256=qDyQKsG904XwqV5BfM4SS04Mu_VL4Eh8_4BPHiUR3qk,60462 +resources/azure/network/traffic-manager-profiles.png,sha256=KoCLGI6tGgyqJMRpyC-y7kQPCQewB-ilerBauzx6PIo,10617 +resources/azure/network/virtual-network-classic.png,sha256=8XYj6mKwmNR6kWXwofIeTauKA0rxfrEixI7fOqTsV8A,7553 +resources/azure/network/virtual-network-gateways.png,sha256=lBHuHaxCw8V3-b3qP6UB_L3L3VzyQgrrUh2jJec0D6k,9512 +resources/azure/network/virtual-networks.png,sha256=I4v792DxYmLSaxfPqsgvNPCqIyHnL96FWcDU69ZdUuk,7698 +resources/azure/network/virtual-wans.png,sha256=d4m94rtwLRShGjMSHQ1JseB13R77BrOySybT9IfjmMY,15400 +resources/azure/security/application-security-groups.png,sha256=TdIkUDdnym8jbdD1w78KoEt6r0x-K7nIHmh1quecwac,9848 +resources/azure/security/conditional-access.png,sha256=554Ezx0qNRnMLifqS-svWkJbm0E73bcJmy2wAsD5-8o,10558 +resources/azure/security/defender.png,sha256=S8aCqPyjcIL4tF4EocRP_CPJwHn8phPAqoRVbexZha4,10526 +resources/azure/security/extended-security-updates.png,sha256=ltGAQPRCYL7MDk2LbpfzwJw126xWoxaq_A9VZ1FLn8o,13122 +resources/azure/security/key-vaults.png,sha256=LEMFgxyRVJIt6vScuoS55C_wF7eBIOLmBWrqyUFy_6o,12165 +resources/azure/security/security-center.png,sha256=q-d0RRcg5M5GzhfaXOJPSoxBT65yOfSQawus3pC_n7U,9895 +resources/azure/security/sentinel.png,sha256=Y4_Stnm0hJA-4xlbtmDgtqokdBMQ7gWML1BBmEOv8eI,10860 +resources/azure/storage/archive-storage.png,sha256=ldxQBeOljtvsdKgYaIRpDRdiffIWtZb1hvl8bW-q_CM,8384 
+resources/azure/storage/azurefxtedgefiler.png,sha256=bdyn1eq7-GaEtW0HKcg33075_VMqudO6VpP-wYkzS-U,3569 +resources/azure/storage/blob-storage.png,sha256=N1JksUGFR9yeRUpByeqYrepBnWOmGjCQsBaYDZsxDyg,3819 +resources/azure/storage/data-box-edge-data-box-gateway.png,sha256=MNAY0RoG9f-tI6AgzCqpcns6uehY5UsKZXiYrkgyTFY,7603 +resources/azure/storage/data-box.png,sha256=2YLLRL6qtMht5Zhwi3XVupLkDutT_VgFNC8DKMoDxoU,7230 +resources/azure/storage/data-lake-storage.png,sha256=AMUVTSJMyjgJdOr4XYA1yTt1HzGzRvKJH17q8hRTNxM,9152 +resources/azure/storage/general-storage.png,sha256=zlyzSkB_DyuAPLdpOZrBNhIv4EpeertZOFNMR1y7i4w,4417 +resources/azure/storage/netapp-files.png,sha256=fiGcXJqF_HBij0-vQCrHbjy-AaGIJUSpGgnl0gYlcVU,2900 +resources/azure/storage/queues-storage.png,sha256=Gid0xB0YfJttDuJBa2CFM6ivhSOeVzDgJRwGF2eBbXU,3495 +resources/azure/storage/storage-accounts-classic.png,sha256=Dv9SSbzv6ZHJelU39Hm6rrLNzSNLUCGmFG05-Qf4IEM,2166 +resources/azure/storage/storage-accounts.png,sha256=8h3l69TG5Miwv8TMslwS9pSO0YeHspc5sJT4Krv9chA,2229 +resources/azure/storage/storage-explorer.png,sha256=H7wlOzhDsMTnPop6AHpKxwAYCfeRwnu5TF97bm3dMXI,3029 +resources/azure/storage/storage-sync-services.png,sha256=bDWtQG192cLFBDIsy93KSBTM4gcIY7mTDZ6umodE2rI,9385 +resources/azure/storage/storsimple-data-managers.png,sha256=iwpVouHqJw0PIEfg2Z_N3VuMygDYdhu67LMoZxAzZm8,11608 +resources/azure/storage/storsimple-device-managers.png,sha256=Ge8_7G6Jh-UNxVUmfBdDXunZWgfLLxzSYPAwMT_lMpg,6631 +resources/azure/storage/table-storage.png,sha256=JnlEB1CO_i1ZE1N8Cnr-HCBI2w6w4NbMbK77CtJeI8Y,4828 +resources/azure/web/api-connections.png,sha256=WXFEcuQ9ZVTIJD81uziO2Yoy8xDbE3ylhlJOgjYwtUw,8487 +resources/azure/web/app-service-certificates.png,sha256=lsDfNBhD-cGaHaFu-TlSDIepPkfH6Wqz78sz2DaX1a4,6617 +resources/azure/web/app-service-domains.png,sha256=kWfiYH2-typZCQJ2d0NcVnOX6t9W2HWXt2-dHSy5YEU,5698 +resources/azure/web/app-service-environments.png,sha256=-RJD8g3turE4o3xggVraJtQpZwlcjZtL6IvzY_x-Npo,5263 +resources/azure/web/app-service-plans.png,sha256=pStlhC9RVzaB2PieWFmCkQGjiYNP2p79fJsAwz5zzJA,7439 +resources/azure/web/app-services.png,sha256=VJ8Y4RGQJ1lBQnuC3j2nlqxLPxrNnuHBfvI7bOJwaN0,29974 +resources/azure/web/media-services.png,sha256=0VSg-WG9qEKsPntPHT-cVL28Y2C5POOxg4TK851r8wI,15847 +resources/azure/web/notification-hub-namespaces.png,sha256=vZpTyrmRFbmjv2PkxkyQ-nyvkvvBknznPqFG3zx_RBw,3665 +resources/azure/web/search.png,sha256=9Ssx-_NQnOr74ARQeKhMSOtNB7dAl8DrUixjDR6FdpE,9083 +resources/azure/web/signalr.png,sha256=XkDXMyLGHOaych_OI2-y_b2REYJFPpP1qg5yWGCXZlQ,8763 +resources/digitalocean/compute/containers.png,sha256=BtRbeF0hG_Q-TJKk42TteNqwb8TYXGr-ftJ6I-ktHJM,14348 +resources/digitalocean/compute/docker.png,sha256=_iVG2ZJyazBR2T7syuExwf7h58YaZs6p4x46pkBrUZs,13740 +resources/digitalocean/compute/droplet-connect.png,sha256=zfHxcpu8dpt74QAFGMaxuC89vbZwhRczURoxZV9w5go,22301 +resources/digitalocean/compute/droplet-snapshot.png,sha256=_fn8PD-F-f7alJiLFyqsUHwr4_0MqDPKYbunAGiDrFI,13773 +resources/digitalocean/compute/droplet.png,sha256=Kcc0qiYoaeynUiAeWq5qsHvlwG_NASt7ItkCJNh0AVU,12631 +resources/digitalocean/compute/k8s-cluster.png,sha256=wvl1HtsrNEZ3aBcdnzKUh7KGZFCwBUmX25gMaTHGjJY,21931 +resources/digitalocean/compute/k8s-node-pool.png,sha256=AMi8CzT35ATiG_VvkfR4WNwBvI3GtdmTTGmYdejU7Yw,13372 +resources/digitalocean/compute/k8s-node.png,sha256=PoDAogjaMP-n4otmgBL6NroktTAag9_kabKepx-AVJ0,6292 +resources/digitalocean/database/dbaas-primary-standby-more.png,sha256=fJMNwb7dn2CY8uo_1HKXAuAsevjM7RVkQtHgeONVB8A,26739 
+resources/digitalocean/database/dbaas-primary.png,sha256=ixvLLrn7pkCQJu0iOuaEqtSTaLhma25zAt8AtL9eXuo,17805 +resources/digitalocean/database/dbaas-read-only.png,sha256=lS5tME-evdipJYqy2tQgy8tvoUXTwlVYfV2bN1DzUQg,17623 +resources/digitalocean/database/dbaas-standby.png,sha256=jWVMQyhjMACKf2sEA4Jp_JuEc63T6upMmmHws44OXO0,23517 +resources/digitalocean/digitalocean.png,sha256=RTSQN2CDAmJsPJ6u0NDXlL2MvjubAF1KNBZgN-P-JAA,15955 +resources/digitalocean/network/certificate.png,sha256=DCXEFHjmVKkdxwoMj1xQKpKS7hyVCwirunejfBUSxcw,5697 +resources/digitalocean/network/domain-registration.png,sha256=gv4rdKKdTc34BsszCdr6aZtWo5lPesxcQovzCPDNOxI,31351 +resources/digitalocean/network/domain.png,sha256=JmFJumIOYcKzTMCf_cR6zD4eroR2bluAMroAIDb-w2k,25941 +resources/digitalocean/network/firewall.png,sha256=7OM6WjTerjQN3j9mNDZ0ly1HSX4R1HC2SviXZS_oZNE,9224 +resources/digitalocean/network/floating-ip.png,sha256=DsHyR7buyRCOIvC110EswCFVKN7id_NSCjn4DMJvSU0,13232 +resources/digitalocean/network/internet-gateway.png,sha256=SGJthBBVrP8kmjVErkcBHcRtzH6kNPH5zZFtTYwIKUI,18139 +resources/digitalocean/network/load-balancer.png,sha256=d5oKbzKDtJbHQ53gew4iCIozIUDkGXSqEnC87CalfGo,11497 +resources/digitalocean/network/managed-vpn.png,sha256=wQDv5UoDU6xY56E6IbEh6qiCdtjl4P43c7HmSQyBxDM,22897 +resources/digitalocean/network/vpc.png,sha256=MtoFlufdzkycrirnZUOXDtYR-Ba9WjJsku6lEgvIK5s,21333 +resources/digitalocean/storage/folder.png,sha256=-CjM8za93hCusz2CwJJ71ZKfpT8OAGcjVheOVq_Erco,5208 +resources/digitalocean/storage/space.png,sha256=C0c7YCPCq3N6PM7mqGkw8gcP9l5realPP0g5XALs_5Q,18132 +resources/digitalocean/storage/volume-snapshot.png,sha256=qPi6djsLgy5riM0sQ2gAEaeG66tlYUqlt_0xR62pxAk,14335 +resources/digitalocean/storage/volume.png,sha256=XVMtsEX91PEX35zg9bMuf-whw67ZtSV7LGUKEQA3sAM,14501 +resources/elastic/agent/agent.png,sha256=JGDO3JN9Z-3_ARUtFqiD6jD-Iw3GvfOwyLH7GE9jlZM,3683 +resources/elastic/agent/endpoint.png,sha256=lZSscvoq-t4fSB2pDvVmfaTnHB4nS7NTKh6XWJWe49E,3778 +resources/elastic/agent/fleet.png,sha256=rNIWXfgT6Kx2XHo4KewVr1k-DCPzDVgKStDBJa2vw3U,2749 +resources/elastic/agent/integrations.png,sha256=sl4HVnAH5IlmVSeSprLBWZycDm07-0Y5sMQgPX5ynps,3804 +resources/elastic/beats/apm.png,sha256=Y0Ayh_i40LXFnFrKNRdu1OFbDgFbZoPaVdSs_crBE5U,1255 +resources/elastic/beats/auditbeat.png,sha256=6rR6JfF3_4l71lAegzPQJ3FznKAmaTzixo8bHvuohoE,463 +resources/elastic/beats/filebeat.png,sha256=ThZtWs72-s-R-HEgm28l7CuK7yDYgv3c16-VxD9rG_0,1433 +resources/elastic/beats/functionbeat.png,sha256=TbEygBKspqB50qB1XyT68RnxAfQuSOH0GCHFKLd2sbo,3471 +resources/elastic/beats/heartbeat.png,sha256=OX-vTz_--vNR3ok9cnso153SrC4U2azJL2M0lBvrAH8,3500 +resources/elastic/beats/metricbeat.png,sha256=lKn-DqBMNVhzLsP4Tx8uhKWTSmtj4cUUX1vyE2-TKCE,4803 +resources/elastic/beats/packetbeat.png,sha256=-U2Wd6VDpVeYSb0d8sZaunsAfukWI4anxqZHv9bBYqY,3097 +resources/elastic/beats/winlogbeat.png,sha256=dY5vYj4zzEGDmCMKvGWL4INiEQNQd-7cF32PBt5BaJw,483 +resources/elastic/elastic.png,sha256=py0-EX823fCtAfHSnO4vvgOMWRNRxFR8l3sIlpg8si0,11837 +resources/elastic/elasticsearch/alerting.png,sha256=eyqRtaL2tgWB_M6V4sRSlcoVuN-saCs1NfsfY3HKzdo,12821 +resources/elastic/elasticsearch/beats.png,sha256=sEx9mdTRQOAILMzk5bZbOBEPglfv4xFYCQJ7TkX6Ar4,11349 +resources/elastic/elasticsearch/elasticsearch.png,sha256=JCly_xV5-bqU83w9CZDjEaNiKBw2nh1PMgxGtEZ3iaE,4707 +resources/elastic/elasticsearch/kibana.png,sha256=UgFjuhykNHcvHA33Ehn_KVGKq_w8petLnR0Rl7jdFuA,5324 +resources/elastic/elasticsearch/logstash-pipeline.png,sha256=O6mwOZGN_AQ2tmDkaWlCTRlaj6HAAJwcDzaEV4prtxE,1411 
+resources/elastic/elasticsearch/logstash.png,sha256=zTdtl6o9o5q9RIEzr0CKsRNUNH4Irco72yjr8aHz-6U,2523 +resources/elastic/elasticsearch/machine-learning.png,sha256=nfAH8GUM7kCvbRDJCFrCTh0pCM0PTxdj3PSJ7m0gZ7c,8100 +resources/elastic/elasticsearch/map-services.png,sha256=0bzJ_3ToeHUnQM2NyHVH1cBywkFIYyElFtO9MyWTQjU,1951 +resources/elastic/elasticsearch/maps.png,sha256=NaGq-3-eP3DbZjuI2-R6dYte8EUdG7htuVq7-7PNj1I,6666 +resources/elastic/elasticsearch/monitoring.png,sha256=hIF2UwefknHknpoZocAkmI8v4cFQmT_hTjQ23BxqlXc,10480 +resources/elastic/elasticsearch/searchable-snapshots.png,sha256=0WM-gEN62-CZ86LBdBO2T1SCod-1OaUaWk-m9vaXTYs,4038 +resources/elastic/elasticsearch/security-settings.png,sha256=4uBsTRxbOgYRWu4a7nt-vHJ2Y1sS2J-z2WYpni8_wL8,9224 +resources/elastic/elasticsearch/sql.png,sha256=sO9acJD_Aa_RQfsmexjysFxvwLHWdMbbLfYILA5dONY,2337 +resources/elastic/elasticsearch/stack.png,sha256=SOzwjw_UxdPxyKLQ6PozBBMW21909iig8bJZ8RKKnB8,1007 +resources/elastic/enterprisesearch/app-search.png,sha256=FnXrzxPmorh9ry75yrEJZksxJqnq6146YUh3Dp8AttQ,7492 +resources/elastic/enterprisesearch/crawler.png,sha256=O6AReKAJPr-pkaAr5QdxZFYm4YWwhhnkeGC8dnC6g10,974 +resources/elastic/enterprisesearch/enterprise-search.png,sha256=cUobKarM25kcXpHBW568yztBZSRSwKYWmqgmeOzHLt4,7822 +resources/elastic/enterprisesearch/site-search.png,sha256=LsFFX3Ulzmi9DV8951BCoLNoDIFyQtXv2GD5XZ6QLj8,4438 +resources/elastic/enterprisesearch/workplace-search.png,sha256=Ji0tehyfQ45Jipr0WmhkVyMhMN_QGDndD7Wp5m-pT_k,6811 +resources/elastic/observability/apm.png,sha256=bDDXJf05BHhEIcDrmVyVs8bzz2JpAu3IXyEF_a5VJLw,1430 +resources/elastic/observability/logs.png,sha256=Id5ugPbR1Syugfts4lXmzcC3WxXmC_urA67f_m78n-E,5091 +resources/elastic/observability/metrics.png,sha256=kH9PPM_xaEFRKkjdLeRAw-JWdfpmAEhZVtl8Nabl6n0,4689 +resources/elastic/observability/observability.png,sha256=JYB4V2PXUTr019znBVg_j_E2X2cGrK3VTtYYGJFtQa4,1255 +resources/elastic/observability/uptime.png,sha256=b6rFOLVXifGiTub330kwFFStBQYThEM2oeIihSDhaLE,6646 +resources/elastic/orchestration/ece.png,sha256=bjvchJGw6ZUiQ2Dyu5d7uLw3zcj86Rwo3JJbSygFVxo,8963 +resources/elastic/orchestration/eck.png,sha256=YmVb3jKssqOVsKQPiKbLjwdx8H0GJ37Jpu8Zarkm22w,10381 +resources/elastic/saas/cloud.png,sha256=IJVwIzOzQHYdMizNaP6HS_UXZe3_3S94UpOAtzA1UC4,8131 +resources/elastic/saas/elastic.png,sha256=py0-EX823fCtAfHSnO4vvgOMWRNRxFR8l3sIlpg8si0,11837 +resources/elastic/security/endpoint.png,sha256=DidhtdupG0znz8mBRYOwfxqoRJbKzIjUt0KRTNlUT4Q,5098 +resources/elastic/security/security.png,sha256=U12GOJO1qLsWU-c5WpCG8Kv_gKzq6PlglM-t1D2Q_HQ,3307 +resources/elastic/security/siem.png,sha256=RS7M5E7WWM_DgnGMOgtY-FuLUQhVFQiYT2VnwQk6N08,5302 +resources/elastic/security/xdr.png,sha256=3nzrixhmj6TLm0cO8_SP_VOYzVaeOgCKNMh5Hk4Yhiw,2621 +resources/firebase/base/firebase.png,sha256=1NTTvsImJ2MjxD-4TWJREenIAGvA_65E4HSsSFM3KGE,23891 +resources/firebase/develop/authentication.png,sha256=6u1lLxtUk-t5znTfu4aMBWmgRfsFMyMPKXbUo7bUo_I,16604 +resources/firebase/develop/firestore.png,sha256=F9BPfP3Lhy4bes05HBKUHEx79UE6MYU6Z0K9_tp6gMw,16458 +resources/firebase/develop/functions.png,sha256=DVujkPG7R5kIu6nMJSM2zM-2Xitrk2ybSJLmtcz-mQo,16243 +resources/firebase/develop/hosting.png,sha256=MkGzX5byprLlrsr0Z8ud-lJ5Q6sQ10AOBRp3831L8QQ,17537 +resources/firebase/develop/ml-kit.png,sha256=ne8skizP1XnfvoboBtTgLvaXyZTPjOrPQz0Pbb5noks,16129 +resources/firebase/develop/realtime-database.png,sha256=4xzgVWpd5JotvrP__fR3LxfWKiNHx4ssgy-r0oW8iE8,15801 +resources/firebase/develop/storage.png,sha256=3DcEG4ybGDpHze1lnAkrIlzmJr3VP5E-ambclvhld5U,15850 
+resources/firebase/extentions/extensions.png,sha256=Mut05yMvK0tweD-1C1K7xYZ-S_BW_M4O8vdnk1iftPE,16857 +resources/firebase/firebase.png,sha256=1NTTvsImJ2MjxD-4TWJREenIAGvA_65E4HSsSFM3KGE,23891 +resources/firebase/grow/ab-testing.png,sha256=6HK6850SpfRlu0eTdabTn-wvnNW-VOZ7UxbLiVAC1cE,15733 +resources/firebase/grow/app-indexing.png,sha256=Exs0tzVQ6CD_5QcB-GKJeZ6RD6bKU3U6gz_JAhRN-Pw,16956 +resources/firebase/grow/dynamic-links.png,sha256=rbi2tQODzEWlWGzs0SsTGZrDNLWkSVGuHSPbB1hTAQ4,16948 +resources/firebase/grow/in-app-messaging.png,sha256=j9EQXn_vkkAuL3lBaXS-Tcd34Gp3gTzwFS9u5GRX81c,16712 +resources/firebase/grow/invites.png,sha256=1aYURN23S8KC50JwaSyW6TIv5T7BzzbGCA0To5h3VGA,16803 +resources/firebase/grow/messaging.png,sha256=dYqDbOLI402YxtIPycStLi6u3fSEpbM75D1_syN8BM4,17877 +resources/firebase/grow/predictions.png,sha256=zBLNpaXsTju_jK6tFWvO8keqWuKT7Vke0wDsLlsHiWM,16824 +resources/firebase/grow/remote-config.png,sha256=haN35b7HJXRVmBhG750M5IBEJtwTE8wtoXL6kkEXQzY,15692 +resources/firebase/quality/app-distribution.png,sha256=-9SDSV0dMwI5_gFX0LdYXN5OaQEmDS4UmKkg7Yo8gks,16538 +resources/firebase/quality/crash-reporting.png,sha256=JoZHy14iYkySgON58m9cPUPsNF5L_4o6cbOuP2UtaRQ,16383 +resources/firebase/quality/crashlytics.png,sha256=U88bYjaF6CmMhpKeVfG3HTe8zmX0eC1BX0kwFFvpuS0,17014 +resources/firebase/quality/performance-monitoring.png,sha256=jnIh-9OdPdJHk19MtHqY5pTFIjIfpKnvnCZE3bJmeRE,18342 +resources/firebase/quality/test-lab.png,sha256=4I5r8fbK2JmM6WSSRrLrbRDoIk30T892aUgTt5a-BvE,16279 +resources/gcp/analytics/bigquery.png,sha256=W2MrxMjOLJVwCZQkOu6wdQcqE4PiM3lQr_uQDQFMfLg,13259 +resources/gcp/analytics/composer.png,sha256=Lt77QTByo5elWJGI4eaMzyJkFFx5AlYGIjPQCoTKiYY,1679 +resources/gcp/analytics/data-catalog.png,sha256=wwn_AY5ui7RdWpCF0dXSNQpDx_SxvoE3HtRuGZDHWAc,12098 +resources/gcp/analytics/data-fusion.png,sha256=qgi_i6AJ1ld7P_18YdeQK8LHgGzDTCJJKPQlA1ghvts,2987 +resources/gcp/analytics/dataflow.png,sha256=2UTRL0QdCjNCSqFNemzsoATpdXyCOtc50DvogxIiFqM,17068 +resources/gcp/analytics/datalab.png,sha256=FihSm3o9PhvMJv1hBWrIPCTvgxQq7Sqy0xFzTHW5oxE,9463 +resources/gcp/analytics/dataprep.png,sha256=9lx1rMzIwZyHHBgLnCDLejlsMOfASxwoqlBzAoC9oIY,8674 +resources/gcp/analytics/dataproc.png,sha256=HAfGv4yFuRZLi_NcyFhZI-m2FDkc2WDtIkEYGy_SGBk,16589 +resources/gcp/analytics/genomics.png,sha256=S_XuF-zTXJJHGDWjZc57CK26XRtmYYLbWL6L5ualmAU,11029 +resources/gcp/analytics/pubsub.png,sha256=9ufPDzEGjjBCjSxibjjM8kGRTD1PLpEBs3rrueYuk9w,13047 +resources/gcp/api/api-gateway.png,sha256=Izdk-Oq9MBUHrRNfRK61ZeHYqsTenBbIlNBw_Ep7M4I,9016 +resources/gcp/api/apigee.png,sha256=ZToOFLmq_NHSrhwvAGOajk-oIYedHlmz7kpc0nMOhPg,15239 +resources/gcp/api/endpoints.png,sha256=BvPV9SlwUNTi1mWLZQnocSgtUgS7ZH2RxLV55aFUBlg,10361 +resources/gcp/compute/app-engine.png,sha256=h50pwJAAeajPdWicQBi4-TFPdGVzTj9KWSEnqzZGvHE,14319 +resources/gcp/compute/compute-engine.png,sha256=XFYoH7s4clmo3RWp61dPunjoebGGMNnfSRfhIi-vzyo,4297 +resources/gcp/compute/container-optimized-os.png,sha256=4CjLviKrt4TkyCa0uHw6bB-TYezyvtqWH57306EhEKQ,17747 +resources/gcp/compute/functions.png,sha256=ccZyy41RepV5tDQ9kIEOTW_H1d7VagoHr5UypGDaJOc,8129 +resources/gcp/compute/gke-on-prem.png,sha256=poI3dI3nOtwrgbyFTYC_k6ia-jkroNTLxtdFHgDWBXs,19672 +resources/gcp/compute/gpu.png,sha256=hzwgWMZtrKiEWZd6IESB0G0jJ9AAQE2TdJaR2xiyvVc,5873 +resources/gcp/compute/kubernetes-engine.png,sha256=ilK_pHOaqR1m0aWxDNZ0wok3iMq6-4J86q6eQkbtG1Y,15296 +resources/gcp/compute/run.png,sha256=NuoTmBElT-AGa-GijJp_lG9hZq0dLppiuEx_LI5bIOo,11188 
+resources/gcp/database/bigtable.png,sha256=x6AUV16eNfaHn8EJG7sK1AxZGVRP39xeyWMYdRW6Guc,22749 +resources/gcp/database/datastore.png,sha256=sNpGRR1qa9cpDFCKOHeAehlhdTd9BBfKjrTJTdT8SOk,4625 +resources/gcp/database/firestore.png,sha256=SDS5gnuC5il8-RDfO5DqpwgEVKR0_7UEF9n0ZF-tobQ,11635 +resources/gcp/database/memorystore.png,sha256=g3Lmo-lD2EQXkzq1fZ3vzOmP79iVU_Ed8i8Xyvk0cY4,6835 +resources/gcp/database/spanner.png,sha256=Izq2W_xT0NKapWqbC_K2ayskj-pRg2uWe-dN7LJPFag,13143 +resources/gcp/database/sql.png,sha256=b6i6OlZhIVb3MmUYL0yECUzbHqqqeRcez2SvHoIbPbo,16323 +resources/gcp/devtools/build.png,sha256=tXHK7hah_1V_QosWa-rzpXuRH4RCVZqQlfk9uMnO-E0,13650 +resources/gcp/devtools/code-for-intellij.png,sha256=X_WNul5czlV1kbWyuLRZUqbTZQalnz7-9OzpU8V2k4I,5144 +resources/gcp/devtools/code.png,sha256=wZ9pHczzRiO7Tn2N5g0IVNLBk28-3Vozexjzsj9IeXs,11897 +resources/gcp/devtools/container-registry.png,sha256=-YMXEYZa6g21hHe5zB-g30q9iuODIUQp8AIVH4H4KUQ,14897 +resources/gcp/devtools/gradle-app-engine-plugin.png,sha256=X_WNul5czlV1kbWyuLRZUqbTZQalnz7-9OzpU8V2k4I,5144 +resources/gcp/devtools/ide-plugins.png,sha256=d_qYLkeqKM9RWg_nNSrQ9NSCCk_B2wVUZQW0sRcIFuU,14355 +resources/gcp/devtools/maven-app-engine-plugin.png,sha256=X_WNul5czlV1kbWyuLRZUqbTZQalnz7-9OzpU8V2k4I,5144 +resources/gcp/devtools/scheduler.png,sha256=pbJ6yDhA1WJmiLE2Es0r3yx75xXP81oetKY9FUws1yg,18179 +resources/gcp/devtools/sdk.png,sha256=X_WNul5czlV1kbWyuLRZUqbTZQalnz7-9OzpU8V2k4I,5144 +resources/gcp/devtools/source-repositories.png,sha256=X_WNul5czlV1kbWyuLRZUqbTZQalnz7-9OzpU8V2k4I,5144 +resources/gcp/devtools/tasks.png,sha256=biDaXe1P-vH5JQtNaCH8JqON8d4Ydxs1bboXChyWwZs,1321 +resources/gcp/devtools/test-lab.png,sha256=XbqU-QFt6B_tTxBbqnWBEjTNGSkpbCE3GdrXYpJES2c,3060 +resources/gcp/devtools/tools-for-eclipse.png,sha256=X_WNul5czlV1kbWyuLRZUqbTZQalnz7-9OzpU8V2k4I,5144 +resources/gcp/devtools/tools-for-powershell.png,sha256=d_qYLkeqKM9RWg_nNSrQ9NSCCk_B2wVUZQW0sRcIFuU,14355 +resources/gcp/devtools/tools-for-visual-studio.png,sha256=d_qYLkeqKM9RWg_nNSrQ9NSCCk_B2wVUZQW0sRcIFuU,14355 +resources/gcp/gcp.png,sha256=vUngMYPazw4MXb4nnoeuOu7RZh0KbVz-eVd8s2GAKOM,19476 +resources/gcp/iot/iot-core.png,sha256=TzSnIIczxLF1le5yAt1o7DdR9HhVtqCG1CsFJ0oAPbI,13857 +resources/gcp/migration/transfer-appliance.png,sha256=PK07sNmbp-2mMMrFVfwxgz5vsTPLQxcOKGfSxLtReJ8,5528 +resources/gcp/ml/advanced-solutions-lab.png,sha256=0k--NdcVTr1lD8C6ZAropjV_Vx-KYfzBAHR3lpEk98I,31551 +resources/gcp/ml/ai-hub.png,sha256=rMtkkplRdkUZTD0yz_d9cF1TDk-Sd3uQZ22-6eavmr4,12244 +resources/gcp/ml/ai-platform-data-labeling-service.png,sha256=M8lrGvqgfoMD9HBhYJBJgAOu8pwnxqQMgEpMqygrbHQ,9166 +resources/gcp/ml/ai-platform.png,sha256=hhFfUvxZdacudPbyUDESBJOTcaSSRN6Nt0Xc2c6Afig,11394 +resources/gcp/ml/automl-natural-language.png,sha256=3NDpzYuVCbyMAgkZt2b1rCr6YGQGdyCZcV7rbOgw5ZI,7846 +resources/gcp/ml/automl-tables.png,sha256=i8YJAoRBFPX_nfL8MGJZSQmea1snEjcACsQgLmvtZhg,10334 +resources/gcp/ml/automl-translation.png,sha256=wMbJuU8KEmiGjQxOIYU65i3-g0EsOh8_1IcvAtod6eQ,11195 +resources/gcp/ml/automl-video-intelligence.png,sha256=CM4uWIo6TCtDWpjTTPPTThrZJx2SZNNS9Y6l875hgGs,10445 +resources/gcp/ml/automl-vision.png,sha256=Ru3OnSO5ERvB78iePWKL3IzOQ1Zvsroo2tmgXqQ2Km0,9337 +resources/gcp/ml/automl.png,sha256=WoYOsNQ2TCE9jaiXJMWNmCzWHBdz188Y7B2my6gf2uM,13232 +resources/gcp/ml/dialog-flow-enterprise-edition.png,sha256=i0WDC2007QF1g2DBbQ25-Rx_SwW2xCiXtzR7571k2DE,8146 +resources/gcp/ml/inference-api.png,sha256=WGmHE664DghDbA6876Pd_gxj426NKMJv24WVrISlxpM,4576 
+resources/gcp/ml/jobs-api.png,sha256=fhx1ExJN_cvGeRBmbZvSvc2uIJ8CMG-bgWMEltRzlwM,9456 +resources/gcp/ml/natural-language-api.png,sha256=2wYc1jDh_-ROJNdl4bInQ8TNtUn88bIb6Ssn6WB1m0s,1768 +resources/gcp/ml/recommendations-ai.png,sha256=UEvnLCuNuOuUog3sFsx1bcDKbnuJD16MM9YDx6uUbsI,5352 +resources/gcp/ml/speech-to-text.png,sha256=_wwSuldfAcFhsmkWdLr3BwZhmcR9kgKHAPq0eSVgXO4,1532 +resources/gcp/ml/text-to-speech.png,sha256=8xUTam_EQ6WlNa0e_JoLIHXRy09M8Y9nAnxLbY_LZgI,5630 +resources/gcp/ml/tpu.png,sha256=S9_bfCX9C7E8mUfs42StavWYVHiRLzpta1WFSh2K_wI,11491 +resources/gcp/ml/translation-api.png,sha256=RRXGRqaq6LVKqe4JUAJ9KH4j-eBQ02hwsc80tHnB9FI,10971 +resources/gcp/ml/video-intelligence-api.png,sha256=1-pjqMj2a_9CHT5T0WP_7XAes5-qjI7yjbqX8dgfY-I,8167 +resources/gcp/ml/vision-api.png,sha256=wkdTYV2Y1Mc3dC9BzeMkylYLx7FL9sEqF12_Gp2qoE0,11998 +resources/gcp/network/armor.png,sha256=EjQ3tAicHG7XKFFtBnaqUJzYyRfP_zYfyNhEaBq9pJc,16586 +resources/gcp/network/cdn.png,sha256=fWJUnXF-AeY0mr7Q6GeEYzBTc5DUwIUV-qrhjkub_wE,9162 +resources/gcp/network/dedicated-interconnect.png,sha256=67cwxubftpMqMY5f_ZLbaH-rxo_mZFCWBkDM-gZgG8c,1289 +resources/gcp/network/dns.png,sha256=5hhngj26ZRR0F-3iHMcMyr0EiNlPQ28fNTKaeY2PZ44,2107 +resources/gcp/network/external-ip-addresses.png,sha256=xOr2YO8Eoe-M2Cd7riLih-u0wmBTEEIIrNzgoZSiTC0,2208 +resources/gcp/network/firewall-rules.png,sha256=7rWfJgcrcaXJNPYtYaPFHXYWvgPvPJpD66HN5TgCNH8,2002 +resources/gcp/network/load-balancing.png,sha256=tO-Zc40Ajso1iyTuXqxnin4y3IZpnvrKdtLpc0DhsyQ,2793 +resources/gcp/network/nat.png,sha256=uM2_921QL8kKCscOcEni5lK3WSQraw99jemw1nyDxo0,12412 +resources/gcp/network/network.png,sha256=cmGZ5U-7Nlr9ILJi762keTWyhVbfqG5PWTOlhn0zJY0,9432 +resources/gcp/network/partner-interconnect.png,sha256=u9oZwG8Geyce_hjEvS3zhfCZICfy_97miTlTXMuaxV0,1987 +resources/gcp/network/premium-network-tier.png,sha256=RBaUBRvqGuvkb2B7_fZmeFnhRdLjYVp5sVfthN1YUeI,9251 +resources/gcp/network/router.png,sha256=VN7lMGyP1qEX5nnjAF2-uIxE9DqAP-i3fDwMCIYllso,6834 +resources/gcp/network/routes.png,sha256=d6Urdm_ccJ0TQVlFYA_ycpNmyQ0Xu35RgUur82FOxYo,10822 +resources/gcp/network/standard-network-tier.png,sha256=Iamt-1CXyWMlMwOyBEqRDOGMmFMltdeD_SvfoOeG-TM,9276 +resources/gcp/network/traffic-director.png,sha256=0G0e20MsLdiM5QXO_214WFlxFpUBZhn9ayepLfQQyX4,9179 +resources/gcp/network/virtual-private-cloud.png,sha256=RbpGRzD4-v8fk2SZdpmAxbbbF1NWevReN4tjkauy7bI,2097 +resources/gcp/network/vpn.png,sha256=NO4X91b5u0X7ruNBdzlbwv4DBtAySYeJjcFH866Gxk8,5180 +resources/gcp/operations/logging.png,sha256=KCn2B0S_5wYYMqZ-LStbE9HZm0ovA3URODegcwVzGd8,1047 +resources/gcp/operations/monitoring.png,sha256=M3VbK0dWrNTL0YoXf8U0uBIXLAGMtTmth4UkTMAZsmQ,6742 +resources/gcp/security/iam.png,sha256=eYF3Gf8XovdnxXXn1seS9rP2lAbfxU1R8nM87S3L7Qo,12245 +resources/gcp/security/iap.png,sha256=qZ8KXEj3KneZU1c_B0pVpTGk507fhnUGsVqNBXgnlOE,11813 +resources/gcp/security/key-management-service.png,sha256=ejGsIM3GEbXKdR3rSOxr-qpXP1w9iA5mi4ALlHVpbJY,11835 +resources/gcp/security/resource-manager.png,sha256=eYF3Gf8XovdnxXXn1seS9rP2lAbfxU1R8nM87S3L7Qo,12245 +resources/gcp/security/security-command-center.png,sha256=3dnyWbfkWwkpHA60CQw9BuzMsaO4hjbYA1iNjV1IJkA,11188 +resources/gcp/security/security-scanner.png,sha256=jE33Kx3KIpsvQHHx7ZLOitV7tJM_Tb_ibHzMDM1Z3uw,15162 +resources/gcp/storage/filestore.png,sha256=g6MniS9tRcpPSp0CHv_uuhjVyxbk03ciaZCTZ3wO9e0,2131 +resources/gcp/storage/persistent-disk.png,sha256=ZbT1ZOTJhldwEtuwRHU6m5cR4ojcSJJJnkZ33SPxw6c,1339 +resources/gcp/storage/storage.png,sha256=1fNXtrgRVTk6DQSb1M3kgJy7xgDKNvcjScTiQ4oF1_8,2047 
+resources/generic/blank/blank.png,sha256=Ouap5gexY_dQ2OJTsB7DNNbHz6IrEICoThatbpCsALM,351 +resources/generic/compute/rack.png,sha256=uUPRBUJOYoRqTGHaPhnKKEI84yZQhj8WGmH4VztL9Fg,47357 +resources/generic/database/sql.png,sha256=cp2U1jSzZ7xa4hXVV3Yt7Wy4xij_C1wInGNF6YBjTsE,3197 +resources/generic/device/mobile.png,sha256=RMbw439B3ieI5sxtzIF4Q4Fuqof7BEn1l5dfP552xic,2693 +resources/generic/device/tablet.png,sha256=V-25khNr1p71FzfH4dovIZUt65Ls8JtVOXnXG5hjhTU,4966 +resources/generic/generic.png,sha256=gmLTBEgIioBVBaOz-9DOsQoe4naw2I9qNm6QDfa7wBo,11406 +resources/generic/network/firewall.png,sha256=XW4NqgWqKPEBsHqS5_Adew9tTyxykCZ9TR1LRboX-_0,6473 +resources/generic/network/router.png,sha256=CQVY6mLVdU2uWkjQi_lKH5oP3COQ74JkkjCY18O1GEQ,6536 +resources/generic/network/subnet.png,sha256=yDO8IjO7GzPLGhx-R6EstCDojRJ0b19xavuE4ol7-qo,1225 +resources/generic/network/switch.png,sha256=MvlEq7M4vIw9PF2XmAEhYjCDTPRt1bsfdC4FLO4Fb8U,3166 +resources/generic/network/vpn.png,sha256=apPBH_9kkrBu8XFFMRJ4XbRpXnI-4HwSnay5MrnviG8,4908 +resources/generic/os/android.png,sha256=vJPgmhqVXrU_-bmZcP6CFqt1PEdq_2o9KAK6IXqbc1I,10873 +resources/generic/os/centos.png,sha256=wXXNePNJQywynmHDLE-vV_G1yr_Nvh1n6Yn9GzxRTnY,37070 +resources/generic/os/debian.png,sha256=r57FW4myRq8aOvYo4Ohwa5dXXbelxLfa9o24IclsS_w,23749 +resources/generic/os/ios.png,sha256=EBfy_2uzbj4lIgFOw7BSnwZdBUwcqQCE70WSdIdLxug,4977 +resources/generic/os/linux-general.png,sha256=MMdHXhR_uxaUDvsc5daefkI9sgkOckyz_0rErH-dEos,16083 +resources/generic/os/raspbian.png,sha256=Kam5CadphT7dfllr6BTYpiNzwglbuc5eNcXlWOeGv7g,33103 +resources/generic/os/red-hat.png,sha256=kOhOobhvhd8AzlJ-WkAdxHAIhcr_5oYQvEpMFgJuphw,15355 +resources/generic/os/suse.png,sha256=PH7FDn28ITN1w2x9RQMTC1xdP_BNLuecllxLyHHqtzs,23837 +resources/generic/os/ubuntu.png,sha256=bDS-ddPEOIiqc91NzcEZ1HGu__wBsxjUa06lEajyrPE,18614 +resources/generic/os/windows.png,sha256=suanh6D1rOJwuCXkvn52nUZGSsPSe_TefnMTcn7gq2A,5886 +resources/generic/place/datacenter.png,sha256=hWKjrGbBPURGc5-kgGAMVdk7qduGn6-j9GdnS9-sPyY,6731 +resources/generic/storage/storage.png,sha256=G-skK0sbLtSIQbdt2QaHFTdptW9FBE6moTPNSFh8QN0,4755 +resources/generic/virtualization/qemu.png,sha256=GX2AClDobazZHK6OioBw_j0PwsIAGzOJN_UuT0IoCfM,20030 +resources/generic/virtualization/virtualbox.png,sha256=RzgwnFKtmpp5r2alruGjFfWT0t2i5gMVr-zMOs4bGvs,12794 +resources/generic/virtualization/vmware.png,sha256=lBpDkIo6QSMPJ_5AJfe-tWnpIE5Z_iAWWmiltJNkMV0,8236 +resources/generic/virtualization/xen.png,sha256=RWWfdA7licpYiXg1XJmlPmt02BAkwrz2k842BfmbcoQ,11604 +resources/gis/cli/gdal.png,sha256=yV5yzuk_FJzWIcjWyiAS0wf-jHRUeIVLPM256USzwG4,106706 +resources/gis/cli/imposm.png,sha256=lLbu5vK9qT8cyrDybzGpp89Rg85Wp2jTetzEoax3RsE,20372 +resources/gis/cli/lastools.png,sha256=Irz3tYJoP2qAL8S3UCXPtnohMJBOeUU9vKBONXJa7DM,33318 +resources/gis/cli/mapnik.png,sha256=SNjWgFSfFHt0-bRxTvZ22Xwn-koU5_YeMWDRW4xV3Mw,39813 +resources/gis/cli/mdal.png,sha256=oix27Vo5KvgDtNs6QbIZNbujw1M2cycnfSvTE4ACTdM,11093 +resources/gis/cli/pdal.png,sha256=zOg_F6JIWBWlhIJ_lSdUHL27t4B8uAv1_vfRfxNCU4s,3407 +resources/gis/data/ban.png,sha256=xWYTT4GruHqysCWcniW6FbJotYg6ZpYd-IFl_YDgFkc,20626 +resources/gis/data/here.png,sha256=1DFzh7YahflFey6e9KJYipbv1hTuc2_mzfsChLJEohU,12039 +resources/gis/data/ign.png,sha256=gWnPBX_2Ao6VTTqrkglKsMktgy-UTGXkXnlq4xXGQ_E,73932 +resources/gis/data/openstreetmap.png,sha256=mjOoSQwhj4p83jy8wC4iVbltKdLbunGciEW0Bb-FWMs,75309 +resources/gis/database/postgis.png,sha256=cY83Ps8O6y3jrv-Tx3RsB_Z7yFvpSupf0toLJMwWZ4A,53250 
+resources/gis/desktop/maptunik.png,sha256=qns-fTcT51Z36YdyQCRrryBUkR-QL-XMVDdyomlHhWc,29530 +resources/gis/desktop/qgis.png,sha256=sdVvW69-Q690ZQeIwswHCzPZkKL9NitAA-teVP4H394,18314 +resources/gis/format/geopackage.png,sha256=OqrngG6Mg-CRZt5Zhzo4qr_W1Hh1iVRjFehS8akn5Zk,57999 +resources/gis/format/geoparquet.png,sha256=-mQX0UB9w0R7ImHfPzzVrQGaMYBR32iWsYkch5lirP0,47244 +resources/gis/geocoding/addok.png,sha256=1_8KijtGw4Qy8lj7FSxogmH9CPcdZCpBkByPRlxGmd8,14943 +resources/gis/geocoding/gisgraphy.png,sha256=7NFX6RQe340qLGndOIEzH8g1woDyCJKvGWVlx-iQubg,72691 +resources/gis/geocoding/nominatim.png,sha256=mjOoSQwhj4p83jy8wC4iVbltKdLbunGciEW0Bb-FWMs,75309 +resources/gis/geocoding/pelias.png,sha256=cZ5tOwtMhmdbPNit-t6UzciXgmkI-ldA8eIEaOtFoC8,20496 +resources/gis/georchestra/analytics.svg,sha256=wx7cmwZcSiq7XfTzndSfABIUJawa1MmCffZC4LyqRWw,14243 +resources/gis/georchestra/data_api.svg,sha256=qhq_veXmUnmqs_D7PMaiLVggsXyz5HYUOXWGF2TWipg,13719 +resources/gis/georchestra/datafeeder.svg,sha256=WDTgddrQgLSN5U0QHxsXs2_8LgnlItzHxNDSqMFtD3Q,18981 +resources/gis/gis.png,sha256=tAtW5Q9mEzqUuSAlEz41ADJf1c1-5a3-zbJhgLodUww,20225 +resources/gis/java/geotools.png,sha256=W4UsutsQhPCDIr7Waq4wb639yqG6kOFHenMgwyJdPSk,46394 +resources/gis/javascript/cesium.png,sha256=gRIB0ZgzwVgeBFmvWF78prSdvMFFXiIK6-xt6vc2Aqo,23533 +resources/gis/javascript/geostyler.png,sha256=CT1QaWB1jW2uUoM5d7ZQ7RVuEESoB4NMvipxf56PU-w,11185 +resources/gis/javascript/keplerjs.png,sha256=BbZ3psCd4bHeng6n0If86wdRd2BkGe74XKz2vqB1Y1E,20012 +resources/gis/javascript/leaflet.png,sha256=Q_xzBTNouNWP0WY4X_CUnBj9DkIthwIKEC5weL5ktak,33970 +resources/gis/javascript/maplibre.png,sha256=SmHjxl00mtrckM77-zpQB1c8rZ_s25xOnnxT8BdU7jc,17381 +resources/gis/javascript/ol-ext.png,sha256=EWKuTPscKsSu82M66eI1VNI8uuz0q317O-YhFuRYdbY,17268 +resources/gis/javascript/openlayers.png,sha256=I4oVtcZMD8fwGaKCjfBe_ctRMKV4-aT28zNxjWs2aYs,13848 +resources/gis/javascript/turfjs.png,sha256=G46HdZTrBkb2on6Jh3aV1AdWjs5bj-Ob4mPNWeSTo6s,7225 +resources/gis/mobile/mergin.png,sha256=3V6Cr7N8Sj18BcRazUTQBv5AWQ4u8qYGtBPi7yBxaQU,5732 +resources/gis/mobile/qfield.png,sha256=o53tjtYfEczXCCpR8D_5R-qIvlbmkxonZCpcVLD9qMM,12176 +resources/gis/mobile/smash.png,sha256=GIfzU6FV0x18CaMoqJ1DH_mgFiWUj3yUs8brySAx_DI,29491 +resources/gis/ogc/ogc.png,sha256=nhpJaUlfryIQFsIW3nnugfBHHPWv2GViJIRk8kkijmY,47648 +resources/gis/ogc/wfs.png,sha256=wwqhbUd8diOSP74PDa69lAc4XD4EkZoG5pshrPtPG3o,23269 +resources/gis/ogc/wms.png,sha256=SAQ-3uMq0x5Pno_Snw5jQtCSkLdVkQUAUqu2GLOLjWM,21591 +resources/gis/organization/osgeo.png,sha256=l4L0NPV5IDzQgCuoImPM-lLJ0BKrVxHiDytEjiArxew,50446 +resources/gis/python/geopandas.png,sha256=v_TPhjXJw3tKxxV-9qRgj0kkjfa13wC5gBvEKfOgap8,8464 +resources/gis/python/pysal.png,sha256=K9_sEPtJSBXtz0utqraGBVfvyrScjOQorlaTxXJsts0,42537 +resources/gis/routing/graphhopper.png,sha256=UBtYNHvkkHiqQbxH2voXtXtIBNEHrAGsfVYHSCRZzmQ,15500 +resources/gis/routing/osrm.png,sha256=-h7jrr1kxLG4vnSmD20YePqCLx3wwNaaa8rDGTpb8zE,23255 +resources/gis/routing/pgrouting.png,sha256=Nn735riMCFC7A4HoaIz0bNFZOFc34Jteuyvg-K-k2RQ,48402 +resources/gis/routing/valhalla.png,sha256=-q8YNxYQMB4HA_v1NAIt7LjfD4iQXsdbTnKYofPizPQ,46006 +resources/gis/server/actinia.png,sha256=6jl-aoOFDHdjoiDJz3nou1_b9_-OTZ7VW3QSlx3Cdyo,78645 +resources/gis/server/baremaps.png,sha256=oUx2IgxGhW6YqgkqwZtcfSfd4fZOM4MU3_nC-LWEGgo,4390 +resources/gis/server/deegree.png,sha256=m84Pfk-CZMvkmCJm-LXGTgUp4XJNXtIuY-kgl5j9whk,27673 +resources/gis/server/g3w-suite.png,sha256=UbIEOuLszDKEWyhjRhfET8vHRX2GNuPvSIwDol9OeTc,22282 
+resources/gis/server/geohealthcheck.png,sha256=iRqS9OttioWojjKg-RM5QFSUuDrewlxt2HiSzvzzG_s,28577 +resources/gis/server/geomapfish.png,sha256=dMR9ZCEOWreFUedvP6JpQ1r7yCeeEn5XKD4Q8nVfjhM,32175 +resources/gis/server/geomesa.png,sha256=rqMb5zyfG2kWNib5iXVaToODdtGNseOvxJLXz8MYFVg,58782 +resources/gis/server/geonetwork.png,sha256=tsuxKE8czogN7hUZnVD_5eJ4KKyA0Ox7B8Ix5Z4vABM,39063 +resources/gis/server/geonode.png,sha256=L8ZQaeG1AwYZhMI1ZgOxK5Xq2Gd2C2httU5oQv2QNkQ,14416 +resources/gis/server/georchestra.png,sha256=Yf4ObaauiFU9IQTAW4Z1rt7hgocVCWfEp-ZdhXm-gWo,10304 +resources/gis/server/geoserver.png,sha256=zoBxSyz7Hl764TylkArWJ4_018pgh1QJzyNJnjXVV2s,48330 +resources/gis/server/geowebcache.png,sha256=8YzcC0GXgUQmR770yhx7J5CPHm0zkR0MxjgE6oHimek,26732 +resources/gis/server/kepler.png,sha256=c4rhDdutfWkyAdjuOGwkt8bjgdI8T1kZmozlmX-OP64,10806 +resources/gis/server/mapproxy.png,sha256=IJZEwmONWXD2rYBoTZDJ3r-jxsb-KpDUBS8OBkrqmP8,14626 +resources/gis/server/mapserver.png,sha256=4pryM5k3WqGf5-N_rxJ5t9qZEJLV-2EOV9psRZ3Xwj8,24105 +resources/gis/server/mapstore.png,sha256=dEGiGjovRGvkCa90pi1zFqPl2-S-em7q3GkQC1ErYpw,43649 +resources/gis/server/mviewer.png,sha256=x54DdoTZJLeBJIJ9b26eoqlxrdUr43l6uQzQAg_bcAk,3635 +resources/gis/server/pg_tileserv.png,sha256=F9aAKtxXkoaxORXFz5YAzNltW4qtGoipMkK9IURz-xw,35289 +resources/gis/server/pycsw.png,sha256=JsrA2dS-xuZfE4O7mY0VrMIm7Ys5Q3GHdW69d-xpX-8,37691 +resources/gis/server/pygeoapi.png,sha256=P9Nu5VgyOV8dFYxx4PWIZaXnAFNYc97Zjtq7S_SA9Y4,29095 +resources/gis/server/qgis-server.png,sha256=sdVvW69-Q690ZQeIwswHCzPZkKL9NitAA-teVP4H394,18314 +resources/gis/server/zooproject.png,sha256=huf-1bNbeF4SUu5EbI9dYzCxyXl0nMl7QzihYumNv60,15226 +resources/ibm/analytics/analytics.png,sha256=lTbuwLwDAL94Q43BRnAwHJ7qTpy_095jK_VY1t8QmgU,27476 +resources/ibm/analytics/data-integration.png,sha256=uZT5-tW3jwtNIUhM0r9JXt70KibY-JhNtS1oUcKMCVU,20876 +resources/ibm/analytics/data-repositories.png,sha256=MRAw4h--XjQfPWaxHuzocPYdRamRMoEgw4sl5Uj3qeA,24931 +resources/ibm/analytics/device-analytics.png,sha256=sEwuIGjR3nXmui5ZKHsxQj4agdet2ek1ttCH7B0edkw,7482 +resources/ibm/analytics/streaming-computing.png,sha256=GZF9K8-dYU77r7JzDdfgeQr7ACbO1s4JRsoWxR8jVOM,26532 +resources/ibm/applications/actionable-insight.png,sha256=0HjWf2HOfi-Hgt7MqudUy8zqBTLaF9JPBnGxIJec_hg,28708 +resources/ibm/applications/annotate.png,sha256=Qj1zpEGFHVJM6WHo03Q5qZmvylQaC5q6aHNH5qu3Vys,17895 +resources/ibm/applications/api-developer-portal.png,sha256=G7l0q-OsIdR_t3FMzg1HMmF3YEgujCsOavgOmNtGArk,19866 +resources/ibm/applications/api-polyglot-runtimes.png,sha256=3qF-przSh-7-YkphWTF8-ByV_7P7C0aQ-m7uTHbYCLY,46091 +resources/ibm/applications/app-server.png,sha256=Ecj0Hl4lkpisqZUwGX2cfxxVHmdVQQ05LPJ6FJxnzuE,14442 +resources/ibm/applications/application-logic.png,sha256=3kv96LKIlqefeLBftEBoaAcciMeh8Qdh84Ttt1OSplk,34982 +resources/ibm/applications/enterprise-applications.png,sha256=q5u2sNLhseog7KssA8kaKSwZDQg0uEdYnlq-oIF0I4M,27689 +resources/ibm/applications/index.png,sha256=edMaovNhF6V-TNskbSP5HaeNqFpcDGfHuAuFZCwsPHs,12478 +resources/ibm/applications/iot-application.png,sha256=x_zGkGEfwQdFDxnawke_-FirIq-909tDerkiVvp_mb4,21537 +resources/ibm/applications/microservice.png,sha256=78G2u4CmMtwvj4SnVTzQOcUcrk1F-p8ekxcmub7pdWs,32337 +resources/ibm/applications/mobile-app.png,sha256=bP0OPwJ5zsK9FvBj9aL5DdRoXdC2masz00rN5E3ZHmw,29285 +resources/ibm/applications/ontology.png,sha256=DK09JX4HOEJRi-pJ-pkADTYm2nID5zg40IX1YJqBKKA,26778 +resources/ibm/applications/open-source-tools.png,sha256=4aTPHABMIeaGibLpIis7cpRRuPJdLSuDe8grAP0fvkw,12870 
+resources/ibm/applications/runtime-services.png,sha256=h38ihcsKsDH46Y_t4yWfIzQNI8beKidT9a5m_-NncJc,48709 +resources/ibm/applications/saas-applications.png,sha256=uAaq4GHhbxl5d8_pJUJcxFbL_qhspM1x0tdVC6z4BYY,25170 +resources/ibm/applications/service-broker.png,sha256=nhDmKZEcHOvKcwe4oxiH1CaoEA2hd-rUPM3GOojsKps,32042 +resources/ibm/applications/speech-to-text.png,sha256=n5iFk0a1cUgLLAin4tRPHMY0rmddtJBF9Zf00oCQmr4,16995 +resources/ibm/applications/visual-recognition.png,sha256=0gzsmJC2ykCqx5IqnQJYb1udnAZ-eXLFmvpa9nU8jiU,25143 +resources/ibm/applications/visualization.png,sha256=7bo73dVvaRS4IuGoCwvHjB7Qt1bnSZsBvQn5IuDhPoE,31996 +resources/ibm/blockchain/blockchain-developer.png,sha256=8XjEzNEb6vBhQCq_sB0IDgGzZKgLxctIt0s2o1VzaBI,20342 +resources/ibm/blockchain/blockchain.png,sha256=_KMmc-EkEHpFJqsOMiK5H_solq6_xamhIKQJiOOmCCA,37618 +resources/ibm/blockchain/certificate-authority.png,sha256=8_omNkZHmJkT12FULbHjZ4PKQjiSszm6ogpfnmaj6MA,29953 +resources/ibm/blockchain/client-application.png,sha256=hlAwGYSR8prlhHFWtk5IATBoo2CBNpWaKQF8sz6jMFM,26937 +resources/ibm/blockchain/communication.png,sha256=QzGWNQh_V6mGFaWR7oG5agC1RVXUiY8nd7wp48Y8RfQ,22215 +resources/ibm/blockchain/consensus.png,sha256=-uITQ3Su6NyEsJsXx-AdRGJb_TpWjl_Ce8wxWIjnZc8,27099 +resources/ibm/blockchain/event-listener.png,sha256=i-wTflchdmR_yHas-MMY845GdLw2a7rBjKEZcnVU100,9399 +resources/ibm/blockchain/event.png,sha256=C3MlK3hCRG4HkM5bnYpDTnJiZJvN5LB_js8PuoZR-OQ,22814 +resources/ibm/blockchain/existing-enterprise-systems.png,sha256=PNDdtgqTekg2VH44sX37UaQb2cX80EG9YPIUKHw-4ug,43039 +resources/ibm/blockchain/hyperledger-fabric.png,sha256=DCwgrQCZ-UxjJ1LiEWhgqTHAF78BdW_4b0HuXsAeNFs,18230 +resources/ibm/blockchain/key-management.png,sha256=upYOqVvNQXcSRS4t9UwnDdJAWITt0dvpxw2xx0kREho,30024 +resources/ibm/blockchain/ledger.png,sha256=rzqaWXsgrmzAc708nvMPHfcELFRmpi_9CVlirfEKNmc,36688 +resources/ibm/blockchain/membership-services-provider-api.png,sha256=khF24SX93XviijMMmv8crvsX3EcWbk5ndNhVjDr8XiQ,39628 +resources/ibm/blockchain/membership.png,sha256=bR75f-qcDgau0d_eA7yBG94N7SOcNltiPiHzleKQGd0,28700 +resources/ibm/blockchain/message-bus.png,sha256=wclHgxQW2W4nL_jd8tdPhkSI0f4InTSchq1MqmCHcQE,1158 +resources/ibm/blockchain/node.png,sha256=yWdYR2B5lojDLlbmU5g7vyUrktYxJv8mnlFREcsJ1wo,36363 +resources/ibm/blockchain/services.png,sha256=e6O7iBEWQYzLBDeKdimEyzSd6INcy1maQCycixFvsRU,27993 +resources/ibm/blockchain/smart-contract.png,sha256=zQcvyPeagYVrID5Iqy3JgEk1N75TSUDeDFRKKHSTauY,25229 +resources/ibm/blockchain/transaction-manager.png,sha256=ao-Wf1BjAlv5ZIC0EGHYceg2d6agF8iGxmN98F0yKzY,21394 +resources/ibm/blockchain/wallet.png,sha256=Jv6XBhsZrlSoJ2aMAgISvU7tIyID0sfQ_GsHvFvVDhE,24847 +resources/ibm/compute/bare-metal-server.png,sha256=4wDv_lBQ3FYX0P8Yoq1nJBqV-9VnsFbQte4mRHIszrA,2011 +resources/ibm/compute/image-service.png,sha256=v2NrIDL6KZ5tNfFFy_9se2DEDudM5bMg0C6VzgQTfLE,2815 +resources/ibm/compute/instance.png,sha256=3-Kk6QDsyRNHZEzGIdTuuk4Gm9IMgh7vXYg_stFQ2mc,15297 +resources/ibm/compute/key.png,sha256=OBade1NT4axIwTMX8-ST3Tkgb855hHGVZk7BdeCgRT4,2674 +resources/ibm/compute/power-instance.png,sha256=4MBgrV1TdaiKfMQQCzQ6src4twghosUQM5r96IRR5oc,5770 +resources/ibm/data/caches.png,sha256=9B0gKsYiyCvDmnejup5k2oleDmgZ5L6hFbaplyMZC-s,15886 +resources/ibm/data/cloud.png,sha256=jYQA8tapcGjE5VzitMuNZnepFXrPynO4-4E4C4cANNU,10661 +resources/ibm/data/conversation-trained-deployed.png,sha256=VIKHCOJhYTaRoZ69dPuEUqbb3Re9mkccs9VL0VVLgtg,32569 +resources/ibm/data/data-services.png,sha256=ZJMOUFlcovT8SQevpfqhkXJdwaxcPQqFAiXMqXf4clQ,13245 
+resources/ibm/data/data-sources.png,sha256=WJJD1ZAwwjrdE2hoLFil1dJYp4WFa8BClwCeekPRdF4,23606 +resources/ibm/data/device-identity-service.png,sha256=2Fc45M-DRj-jwyzU8473K-j68WpuMzmsiJb7_JbLRb0,31249 +resources/ibm/data/device-registry.png,sha256=N0W4580jSQjwKfrtl9l1_7yPa_5uW08bGnI7CzI-rnQ,21867 +resources/ibm/data/enterprise-data.png,sha256=ashmMikqwLH20CYNEC3pZOtldEGe7FHjNPVCm-6qU3Y,18553 +resources/ibm/data/enterprise-user-directory.png,sha256=SeFcyPQdSD5H8LMZBGGm754ZM8wuHrujO20VU8wkGDE,15823 +resources/ibm/data/file-repository.png,sha256=QZ0HSDT_ZtK1RCxC_iVypAFJ1hw9nwm_6Npoa3JlWIo,20042 +resources/ibm/data/ground-truth.png,sha256=Ul-RnGY23skoVduo9fp6SKrQuV4mmEUnj_PIgzrjVrI,37101 +resources/ibm/data/model.png,sha256=WHxaxEwYdOogOcEm5oQkcvR9KHofvDqX5Mtv4Oj95V8,20215 +resources/ibm/data/tms-data-interface.png,sha256=laR0YRVJhROCtrKaHymZdZcwcExUINi5D2LMZFOMu8A,13268 +resources/ibm/devops/artifact-management.png,sha256=HSn0clQthjpxLidQtywPfHBMT9MwNurGPhllh-V-LmU,21280 +resources/ibm/devops/build-test.png,sha256=R31dfOja698OBWccuz65waSjoHNtoryrxDxwv0Veeb0,26134 +resources/ibm/devops/code-editor.png,sha256=nXajK3D-4gciONNAK_CdThPj0keJYn7sys4kaZ0GAqU,26142 +resources/ibm/devops/collaborative-development.png,sha256=6LOioasPzBI97wg57xBh1Zj--8CDRl6sgxwZKJFrNTM,28636 +resources/ibm/devops/configuration-management.png,sha256=_LV0G_9_Hr6vllQsJmrDADFf1uF3nqANh0BbPQ13pck,41047 +resources/ibm/devops/continuous-deploy.png,sha256=at7-zsierV0GZA0nitF-U-25OzKiQVgxo7mnc9vJ_d8,23661 +resources/ibm/devops/continuous-testing.png,sha256=FyhPBlm8yfabbbVQaFAT6O-6II1JyAqjCEkM-Y7tbJc,27336 +resources/ibm/devops/devops.png,sha256=CNkeZXp25RmgnnJhBgdHgHeXMECqpsQ0NudRNXGKH_s,13743 +resources/ibm/devops/provision.png,sha256=eJZYhXrCBkOHGKR-aewiHTKFi6o01TJRVWNzfLJ7o3Q,26106 +resources/ibm/devops/release-management.png,sha256=kE0HisevkpDaXgXAWfvvLFeoAkwveClfzVS3-3oumC4,18888 +resources/ibm/general/cloud-messaging.png,sha256=zN8Rso1I35J8xaQj4-JD7dfo8G-J8-s7LcYg5TFjWxc,2390 +resources/ibm/general/cloud-services.png,sha256=7jRcpw9N1-tFFIn4tHVfGzEVNodI59oTYMlflRw0nWE,2726 +resources/ibm/general/cloudant.png,sha256=8KDvtXJ_8iZeMMdJbqKMG1uAjhHMJqBxTmH-xgf9JBg,1657 +resources/ibm/general/cognitive-services.png,sha256=z7FpSMuNOW9k7bq-GtU2rm-KCZtpHXbnxRI8-iDx4tQ,71616 +resources/ibm/general/data-security.png,sha256=uoMi9ridCtlNxo1BgYTR1rKF-WWIe5EMvVUed0MkgMg,1745 +resources/ibm/general/enterprise.png,sha256=N2sakHmfhi4Bx2uDH2ixqD34UqOrmEkx0HEyvwJRV2s,2425 +resources/ibm/general/governance-risk-compliance.png,sha256=XkUKLLi-KrTKi2u2iBPY7Dg_OpTY2x1-pT9v98uA0Yk,1357 +resources/ibm/general/ibm-containers.png,sha256=sohUVLrZfxuz8o0hx6K1EzE3eDBL_QmOCQ7QrxW2hiY,20029 +resources/ibm/general/ibm-public-cloud.png,sha256=0lvl1xpEA2P6MrX64se0GYRil5MwyFE8zwQCbanzHkw,12311 +resources/ibm/general/identity-access-management.png,sha256=q04OjEOa8zjJrELmTMMqdWLQU-GjaS8xHzwSRPgSfHI,2363 +resources/ibm/general/identity-provider.png,sha256=zQWHOdYyxHpu7mbvCBFG3PPXYv39RBMEnsRG1GdkIqc,2295 +resources/ibm/general/infrastructure-security.png,sha256=PX2XmPucK-qB1NHefOnom5mV3jFClMS53Mgu61iavvQ,1751 +resources/ibm/general/internet.png,sha256=2ucDRkpG4wiydJBbOt0atOR8sYoYTyicTLvKD-R-4jM,28733 +resources/ibm/general/iot-cloud.png,sha256=MkBx6-NBWx2YaIGg-nRd_tEmsJhFe5DjKM_Vg88J4Zs,12311 +resources/ibm/general/microservices-application.png,sha256=MeRQKJY2N_WzwHv_8ojf2W-gUEJV4iGPLVboyNf_ov0,36562 +resources/ibm/general/microservices-mesh.png,sha256=02ricX4GTyxZ1Q2pC_51gRAGfY0aThqkLp2t5NnkB3I,2473 
+resources/ibm/general/monitoring-logging.png,sha256=e2pK7lPjSWPeUedxAzxtuVqSwxYOyiJDRnUu1vwecUs,1458 +resources/ibm/general/monitoring.png,sha256=lJ7XboJCzV8Fbg5CJGZCPMglRDB6FBxGnbmvoZtcXvk,1904 +resources/ibm/general/object-storage.png,sha256=vo753Aj82MZ9ZWcIH9zDjncQtx5bZna0N-xDCOSSxMk,45329 +resources/ibm/general/offline-capabilities.png,sha256=TfASEwV5Tr86Joz51wWSuYDJM4v1OuIRJwEXfUIdJFc,11381 +resources/ibm/general/openwhisk.png,sha256=JmalZt-2M71XP_ePnxdMgQTeJemr6_Xtf-gfl2m8b_4,18257 +resources/ibm/general/peer-cloud.png,sha256=TX9HAh0V1hiA-WhqvqTNBN9CURi4J_ry8mq-bPdlnoo,14588 +resources/ibm/general/retrieve-rank.png,sha256=nn597EKBLkTB1HDJ06VtgsNgyyvecmv9yDCMz_O2J-Q,23369 +resources/ibm/general/scalable.png,sha256=vOXWXz2Anvsf2YU6byvuevmq0Xb2GZ1bOU58ZuVIRk0,5609 +resources/ibm/general/service-discovery-configuration.png,sha256=1traY0oBqMZOlKGAbuw1W4t_xl4bi-j8oqkzBOdXhUw,1705 +resources/ibm/general/text-to-speech.png,sha256=qf9p2UkR9KLz9x-PbitrSCuzUqhTgLPQFL_OAz7WOmQ,5883 +resources/ibm/general/transformation-connectivity.png,sha256=8hZPrBWjlyAqd8jINNCVV2JU0Cg_Bdkia__pwdrNU2g,2004 +resources/ibm/ibm.png,sha256=tXqUTNqdZL_0Zv6eOOV1S3LaAJhIAD4XINZmVIOdGw8,9526 +resources/ibm/infrastructure/channels.png,sha256=RBMpTNB44zo-fulKm4forVNrkS0gDZTE-jleNJLHyqk,34429 +resources/ibm/infrastructure/cloud-messaging.png,sha256=itVlB0dBKvCasPqkXDMdA9T-kadme9KXpO1VxGjNlPs,31043 +resources/ibm/infrastructure/dashboard.png,sha256=xZtRaD0YbI7EB2uFboiBDyIyVAwm1tRvGC-MIyj1uI8,21195 +resources/ibm/infrastructure/diagnostics.png,sha256=raLQFJVj7ibCQ0h_PFqGToJiy9D1m7XtC_0kM-AwX44,12455 +resources/ibm/infrastructure/edge-services.png,sha256=dfvUBAg8LqOkaKnoEHY1QxOAKxyeAzow0YCwABalJ_Y,47948 +resources/ibm/infrastructure/enterprise-messaging.png,sha256=OBvxHSxPq9yRHIsTJgsTy6zPs2TN2_AqTL-BQV-4Y0k,33110 +resources/ibm/infrastructure/event-feed.png,sha256=tdBlhHEnH1gOfdTcw5YkJxJAwm5-_r-tjOrhIMdIWH4,31199 +resources/ibm/infrastructure/infrastructure-services.png,sha256=Kn-I8ahM2yfySOmdm-uH5mrG12BLryEhTI6Xlo-iG5c,15434 +resources/ibm/infrastructure/interservice-communication.png,sha256=6hEJwcgT91EMbEnvXBWPCGBx3Cmg1bkul2FDb_ImeK8,29947 +resources/ibm/infrastructure/load-balancing-routing.png,sha256=omKP5t8oF5rCMmvpSz9bOoqBcd5OYTYWrfnVqSHlOOA,24271 +resources/ibm/infrastructure/microservices-mesh.png,sha256=qCKI5iYC9kvN2YEtrADUF9Cr6t3SlcCg4uQOZC_wKRA,40444 +resources/ibm/infrastructure/mobile-backend.png,sha256=Jd-bvqn6lWJCqUJRaz6BQK3in6icZwkGX6OI2enK598,21544 +resources/ibm/infrastructure/mobile-provider-network.png,sha256=SVf4bpI7H26tVUAaYk9coF9kEj5yNsOfp8v9X4qQygo,47953 +resources/ibm/infrastructure/monitoring-logging.png,sha256=4HryPcxlh90YK13t3wywOgZu7QYV3KxYKg_WtWwSsUs,12359 +resources/ibm/infrastructure/monitoring.png,sha256=357sgZ8muAzze3aBUrYVORBHYXLh574wCjB8DsgWA1M,28635 +resources/ibm/infrastructure/peer-services.png,sha256=Fq5vRbm7887fGElOktFka5VofMORfi0dY49mVGofzlU,43186 +resources/ibm/infrastructure/service-discovery-configuration.png,sha256=ESgkda-52Fr41FWFLU_ovMEBBwxNCjVXYsq5uwXkkWE,23684 +resources/ibm/infrastructure/transformation-connectivity.png,sha256=OoTXn2OCZ97m67zAlEI7r3h02wBk2j8UFa49n_ATrOc,26227 +resources/ibm/management/alert-notification.png,sha256=YjgAqDd_s8b_dHgPzxnHa9EsnPGI9E_9plvHDc6vAfw,28683 +resources/ibm/management/api-management.png,sha256=m3Upu2cArylsEIs87tiQ6AHR_JTR8TvlKmEChkLzoQI,44756 +resources/ibm/management/cloud-management.png,sha256=4Etm8cA0Iaos3vkGTtR8H7sCxdXAISO-QkEA2G49dH4,40171 
+resources/ibm/management/cluster-management.png,sha256=cafNVjUZiwu_ayhtuJim5oDK-AkSL6le5ooHe639JkI,43524 +resources/ibm/management/content-management.png,sha256=fnro0O4HcMJZ1thvgRWKflYV3BvPR6hC_vOe_WXrXRM,12649 +resources/ibm/management/data-services.png,sha256=QmDo0hDjq2mlyK2rnXhMnaGYUiF0dxRYOJbmvPN9LMo,36895 +resources/ibm/management/device-management.png,sha256=ktSIa7byICM6Obl35ZuhqxLr5wJAXIKYu6DhMJw9aCc,16432 +resources/ibm/management/information-governance.png,sha256=QeQ8bgGJXmNGCA0mtk6NFx0WXS9hj4_c4TlsM_I4AkY,12211 +resources/ibm/management/it-service-management.png,sha256=Q-Sh53m550bHKbiQqjs0-5F4vBes6LgjI0YduVP-9tQ,38941 +resources/ibm/management/management.png,sha256=5piPgmOG21WKmsba71n0DmiWQEdJyl7F5tZKJJg0Ntg,43527 +resources/ibm/management/monitoring-metrics.png,sha256=4ZentcMRoqfCuiEtfecqLa_ycxYt_xiYZEzPYAPG5nU,12055 +resources/ibm/management/process-management.png,sha256=nXqZuOQrkUIMO5tR0EjSWhmA1bD4TC5Ch77kwKdOrKY,36328 +resources/ibm/management/provider-cloud-portal-service.png,sha256=0dzUmZ3W8qneVPxEuJKGC5_lnCTxli8cd2DZgNqYBPY,26586 +resources/ibm/management/push-notifications.png,sha256=3DfwJEkX1IvITImb7Fj4nu7geybe01MFLz8co5UDszQ,19221 +resources/ibm/management/service-management-tools.png,sha256=m1uFURy90RzGa6iEcq-H9qYiT4cAOW551xTwdU2jArs,46034 +resources/ibm/network/bridge.png,sha256=maTXfF-pwyIeYA9h6EuAG1LkFiSV0sRzMLy-nyksEbk,2323 +resources/ibm/network/direct-link.png,sha256=FmMA-XmJHR924E3nUbAV7D8O5o085ELpLgBGyh2v5xs,1661 +resources/ibm/network/enterprise.png,sha256=iqF74AJPnFZh457KE5GTCPK8iLXY2LjcatdnfplFIoY,3924 +resources/ibm/network/firewall.png,sha256=yiBQ82mDQWP2ACcv8pHpiu0IADsfflgrJqPXQ-ru9Tw,1588 +resources/ibm/network/floating-ip.png,sha256=UFq6-27uknlWGlW2J74hv6S5Vj0R5pNvXxxK01gpbNg,1194 +resources/ibm/network/gateway.png,sha256=yktt7Ssqxj6M2Rm09bNVIiBzxsgBuGDA3Kc7vM8WGZc,1529 +resources/ibm/network/internet-services.png,sha256=GEfinBLQqWgfPkjIfHKVhrn8h1eNYbfZL1P5FC_VvTg,3752 +resources/ibm/network/load-balancer-listener.png,sha256=tM3lFrmqDWKCLzlMtkVSamz3GrZfg9b2TbBx9tpBVFg,4239 +resources/ibm/network/load-balancer-pool.png,sha256=dxLcdc6sVmOuEuI4v06A8GTOn6eFIKfw5wCa4MTMr28,3432 +resources/ibm/network/load-balancer.png,sha256=Mn_iuTV1Dn5lQ51VgOafSl79ugj3hveFYQPioI8dA_Y,6057 +resources/ibm/network/load-balancing-routing.png,sha256=7Dv8v7gawwjU9fFlwO1jWDd8T3hm5Q7Psl46wTm29Mc,1898 +resources/ibm/network/public-gateway.png,sha256=2K7a1eAF2Ufiu2N6iXQcNXebs1FT-vJO4aNCtn7m2OA,2365 +resources/ibm/network/region.png,sha256=CVM0whmbB3uCUQJLC9XYI0p8DHUmqEAwiTmTPNTqZxU,420 +resources/ibm/network/router.png,sha256=PPYaRiqyPEa1kRfzpdCqcuajHXBeJ5kIm_YkCocjQSs,2566 +resources/ibm/network/rules.png,sha256=w_CIY7RXq6lL2H-pZgew6OnvxZktnv4e9PSD82haguM,2457 +resources/ibm/network/subnet.png,sha256=gkGRPHFYZWbOcwjHxbSk47lBbF3y2nsYhvTPyNM-tuE,892 +resources/ibm/network/transit-gateway.png,sha256=XZ741GdqFUKsmZb1uwhlT6YJiScyzA5PyjY2JGh_0dk,4726 +resources/ibm/network/vpc.png,sha256=fNmIWKDYpxXfqi9Vz2hHSRjksnwJhX6iOx3GhDsyZ28,1267 +resources/ibm/network/vpn-connection.png,sha256=BDl_9aHWmHs1PKmz31AGE6vmpKKkRPAgku9V5FtWWOk,3022 +resources/ibm/network/vpn-gateway.png,sha256=MLPS1RwlUuoizsnmdC_ZYdBLeOA5sCfsNZVP99393Iw,4094 +resources/ibm/network/vpn-policy.png,sha256=9zEaO5ZSSc40iY__XMMu6pbkgWDqh89wYCr2RVw3WnQ,1727 +resources/ibm/security/api-security.png,sha256=hRZrS-rDTH2yRs6dZfhmSEyUArKoc-oqZFTNGwu_x-o,17689 +resources/ibm/security/blockchain-security-service.png,sha256=1WDZAoCI9A1dred1HPcbxMLlfihqoFJ8K1j_ioxhZp0,36814 
+resources/ibm/security/data-security.png,sha256=WfqQ98jzstTYUvRxHaysZX5kymfD6bEMlBmQJGCJ7es,24401 +resources/ibm/security/firewall.png,sha256=Wm-CefCRkIU-JUFGniQnB71omb2yz5rhCEOSobIvMG0,25941 +resources/ibm/security/gateway.png,sha256=XNh8DAPJs54gfDtPVGFyigv7vgHw0Ioa7r9-SN07eD8,21562 +resources/ibm/security/governance-risk-compliance.png,sha256=fbsonmzmBCaV5Nuw3Urao1amtl3tGW6Xxg9gic0zQC0,21414 +resources/ibm/security/identity-access-management.png,sha256=r2eg9r66h1J0MQHoOwFq1wSZFgQHSGX5gWqrbiCMUtc,39230 +resources/ibm/security/identity-provider.png,sha256=o2ks6oSMt6jg4wpiFVpSpBCvnVMf33pCF5BijhENuIU,34789 +resources/ibm/security/infrastructure-security.png,sha256=n1_fFhVemFJMlZYVc8_8Os9wPXjL-Qr44p0U72eZ8mU,27842 +resources/ibm/security/physical-security.png,sha256=DLVt9Dv14KZErUzRJb7P0wFF6VZGxQnOSp9tjxSGA3Y,67622 +resources/ibm/security/security-monitoring-intelligence.png,sha256=NJpb2aVUGdOchsh7riIFKnWuwZaYqkM9xh_W4jgqpTM,32629 +resources/ibm/security/security-services.png,sha256=YuMImz7kLziN-VmmIF0clhHAft8yNLBv2WQfDNsKYOk,30797 +resources/ibm/security/trustend-computing.png,sha256=Zp1eFLsyieqVEcriYNZ79zCis-ykGJCKFNlqOyhs0Hc,16355 +resources/ibm/security/vpn.png,sha256=gYADJsEnArucY70YN0Te_7t8rldfRDK9hO-JmQPWEr8,14061 +resources/ibm/social/communities.png,sha256=T4nU9DRLZNY-e4wh4vxYBRmFAOquUk4mIoJ2YsV9WvQ,21314 +resources/ibm/social/file-sync.png,sha256=gZgfq9MTohWyzA_2z6P96g0t71YCRxvcjlVTobm4fCw,19820 +resources/ibm/social/live-collaboration.png,sha256=JzuFR_UQX19dMmvhksVvMgHQBxwbslID0O5nKbx8TWU,38083 +resources/ibm/social/messaging.png,sha256=Pjkx_2tXH53gBkXLQMjDgdgfl-pdwoJOk0R4uJm-TBs,19966 +resources/ibm/social/networking.png,sha256=7pwEtY9sFbD_zQTRg6hAvhZlheI9GlEi_GwhFYdGsAs,29837 +resources/ibm/storage/block-storage.png,sha256=2_MYwMFilYe3sB3KUGICwlvlnvx_VtqIiQA60Nbu6qU,23518 +resources/ibm/storage/object-storage.png,sha256=zC9H9w6rYzF6R6-WKdS3JMUj8wP7RIeW_iU2jaImt4E,3325 +resources/ibm/user/browser.png,sha256=lkrklBeq_GJi-NHMqCHPM5iGixuocY_O3QTEY5dyB3U,1032 +resources/ibm/user/device.png,sha256=rw4MeS3mW6MTT_uCGDcJY8mWnXqGVLgrD-1xZdIANRs,1370 +resources/ibm/user/integrated-digital-experiences.png,sha256=vtG79MBBIN4e8VOxUWki0JWsJT_STkVpEOwgk8hn320,1854 +resources/ibm/user/physical-entity.png,sha256=jWkhy4aHxL8Pfgi7kkG07rL65WveE2aP5QaaVpspp58,2854 +resources/ibm/user/sensor.png,sha256=gsvX6VKHEg8RMEOpbwKrQGA9GJNAaSQqthLnlvmGQ3w,2251 +resources/ibm/user/user.png,sha256=-nhNmtz2H5EYXx9xLk0VOVkkOJvZchaeMFT-ZaLPXY4,1681 +resources/k8s/chaos/chaos-mesh.png,sha256=rvIo7YR-_TOsdx5W6EJfg0ZlnPSYSJckH3k9lOLkWpE,65069 +resources/k8s/chaos/litmus-chaos.png,sha256=0IzicJtsRFuTH6H4XeDdFAeJ_l-ozc4d6-nisjJkYpc,2971 +resources/k8s/clusterconfig/hpa.png,sha256=P9AK21Ah4gWpMgLmB2-9rI06s9IDnU9SQtdisXN_DbA,14641 +resources/k8s/clusterconfig/limits.png,sha256=Exc9rNRVO2oF3O7wlOQfxeCI5KQPCCy_3dNwE7OXjBw,11962 +resources/k8s/clusterconfig/quota.png,sha256=gmjxFPfJSngCr9JDq13-WQLWyNNTT6jQO1PaYdzbl8w,19134 +resources/k8s/compute/cronjob.png,sha256=9P4ket71cam6i4m4zPQqN9y-ZAo2k2DP4IcSngXBr1I,15660 +resources/k8s/compute/deploy.png,sha256=-lADUr3-jIawuNBSiVqok4X_gaehBtbP0qMk-mSYl5Y,17128 +resources/k8s/compute/ds.png,sha256=-RYjMFaM5HMQI6sNooaV_TMbrFMuoEN4cgwsvUBCRRU,12132 +resources/k8s/compute/job.png,sha256=ZHt9Ui_Awn6bAxrIziWw4I52nov0TwoKdYNM3jN76kk,11657 +resources/k8s/compute/pod.png,sha256=QUS4HmiYh65Nz09iNberxPoD2oOOrO7x5NLNbAhKnLo,13799 +resources/k8s/compute/rs.png,sha256=yqBFmw2c40R7fWEq6t3kfpc65vtc0PHc10COAd-OQDs,11658 
+resources/k8s/compute/sts.png,sha256=p7qicpy4URAU2QdJVA7CUy12AoLhNyHMi0DCNEUEK4Y,14337 +resources/k8s/controlplane/api.png,sha256=S9mb1qlBoo9RzSIBbma-MM4Gxw-yWxR-x42YXAJlA3g,18870 +resources/k8s/controlplane/c-c-m.png,sha256=HkfvQirp4AnYmKsfzsHoDBMFf6aLOMJPY9ZJphpXNY8,18694 +resources/k8s/controlplane/c-m.png,sha256=fgmaskHUwy54ubuQDFfZG08WY4vVDvJZqvXoIGIWA_8,18658 +resources/k8s/controlplane/k-proxy.png,sha256=BbqyG0YVT9XDmYjC9H0RTkKmfh62obmbnWC77BhRMCo,20655 +resources/k8s/controlplane/kubelet.png,sha256=HIqGLiKSHTJINVYsIUhCe28OsLv0vVZnk2BGVINtqYE,19687 +resources/k8s/controlplane/sched.png,sha256=FgVvy0aHcuicdzTBUtMrizteDgeQ5Jm8hN35I5V0ZnM,20273 +resources/k8s/ecosystem/external-dns.png,sha256=xjeig3PnMzI2hl1Nd84dUM3as1K5gexgU7fLZmCHMVE,51875 +resources/k8s/ecosystem/helm.png,sha256=jx1pcJMnxHqy8kSJMwEa-tL5KL90UNdSbZA6HMRU_9U,18881 +resources/k8s/ecosystem/krew.png,sha256=aV87oVnt5jyP0f3QBa8rkK-l1mitrC7I7V_yEZRQYvo,21159 +resources/k8s/ecosystem/kustomize.png,sha256=Qa4mejlLgXqp-_WtibRYWxM9pNpdpeegdnBWI-d2oy4,5815 +resources/k8s/group/ns.png,sha256=v3r3VnRug10TFvJPfuuZpWR1MMR4bhFupXDAtJ9Uo_w,12060 +resources/k8s/infra/etcd.png,sha256=yJS4rRDnI8TndZN0ZnN8_CwlTVlPusGIn9V1UHdXyZU,19065 +resources/k8s/infra/master.png,sha256=0JvQt7LFfHaIClIMepNEPd_hvVcPhRUjhSwjnoE9E5E,20410 +resources/k8s/infra/node.png,sha256=RXacBGN3_-Eq2TpfzNXJjZIaU8lQFAzBsern7mWp1B0,19679 +resources/k8s/k8s.png,sha256=_0ffWXEoO0R_1PYVeTQCiEmINrizTeiFm8AnwlY79do,18069 +resources/k8s/network/ep.png,sha256=iYySDsiqBg3Su9hHALzx5rSwXJ2a7P1k_WH4DUwYYtU,13871 +resources/k8s/network/ing.png,sha256=N_OFyrycbxcTHGoiS1yHfNrETP0e-_wZv188UTnNXk4,15666 +resources/k8s/network/netpol.png,sha256=FoUfjRIzWl82ZA8zURpP7ES3v2tLFdN4Dgv7S5FOGdg,16552 +resources/k8s/network/svc.png,sha256=GbjsCy5TEE_28YdPOWQkc9BJMIZG76zLhabNRy1NNzI,12015 +resources/k8s/others/crd.png,sha256=CpR6Cpza3iIeYwOkKA6N1WdP52blxi5MP1SD71rrcKE,13298 +resources/k8s/others/psp.png,sha256=_4WQWvA3V70Cw26gdDhqNEaRoPGn0n-EqYrWRdOZQBA,14995 +resources/k8s/podconfig/cm.png,sha256=F56__P7ZwNBCo7kF07__Ao_wbIyKYb3Vn0a4B1iUMjI,11165 +resources/k8s/podconfig/secret.png,sha256=TVWHkhil7w5IB_L4STJoUtamKzqZRiVdbL1A5-Pwzwo,14308 +resources/k8s/rbac/c-role.png,sha256=zbLKG2L-DjBdWkwp8FD7t4n0TNE7Qz-Wm4QusjBvDgw,14797 +resources/k8s/rbac/crb.png,sha256=juqm7FNRFxBPohJNv8_jjgvjd9aGRt5bfVGg82bXre0,13257 +resources/k8s/rbac/group.png,sha256=pNANJgds1wRreSLv4jQBxEfTngYcwKSa7JGHoNgSeCc,15148 +resources/k8s/rbac/rb.png,sha256=uZtSIeI_-UrsZYDj49DAccFHiYH3ZHLwbaHAhE2v6vA,14294 +resources/k8s/rbac/role.png,sha256=duSwpF4mir1f2MjOKhuC7SyasYTwk2gk6RlVOoIJKPo,15894 +resources/k8s/rbac/sa.png,sha256=3j-kXfyR5ODtYr4-x-Ej0I5KGxAFNDD-HXx7LniTPOU,15492 +resources/k8s/rbac/user.png,sha256=RSIJrk6FrQBko2Bd-ZX_KmmyHM7rkQ2ry5V-E1XI-v8,13753 +resources/k8s/storage/pv.png,sha256=5oBQP5dCkjmzkgAofNYUrf6sXfPbKPSQgBmvRq9LPEs,13556 +resources/k8s/storage/pvc.png,sha256=y4VthY1CrSQlTJ3ADkt9uefg69zUj3SWZ655e_3iN_8,15896 +resources/k8s/storage/sc.png,sha256=2T6wptVSSKeAKz0Fa82KSkrbHIQHpIcbeIYZpFXKNME,18190 +resources/k8s/storage/vol.png,sha256=OGELZbH6repzsLBdDvCmjjOnNZeR5CGtBfm5Qq4TbTY,13664 +resources/oci/compute/autoscale-white.png,sha256=69G7ZORcHo80Zjj9DFaqM1vb-Bpyms8gI4XcyKzGNgw,9799 +resources/oci/compute/autoscale.png,sha256=LTA4CLK2hGaJKd8WDJarAp70CRUCwd_tSdGRFCHckkc,11385 +resources/oci/compute/bm-white.png,sha256=WxtCoByaw2q43UfyIkiimZw6OaJ-I0LN283hnnj5s68,7549 +resources/oci/compute/bm.png,sha256=1_YdWX30R5sUNYwaL2m-qqLKuPOMGwaxEt_zif5giHM,8170 
+resources/oci/compute/container-white.png,sha256=1egp9tRKeby6W2XuzB9HbRKpLZ3_WITHKbC2CxGmR2I,8008 +resources/oci/compute/container.png,sha256=IH-k454zHFIMkXJucCefcuGMrvtf-9G0UvaUeiE6Qcg,8747 +resources/oci/compute/functions-white.png,sha256=Uh2LNXAcxorWLy6W1a154zjUBpolsVDMiggv9ljTTkQ,6726 +resources/oci/compute/functions.png,sha256=izjYpYE7114o7DB51HkBScFQku0ho8ModMote3rr5C0,7179 +resources/oci/compute/instance-pools-white.png,sha256=_YOFzPZS8pd7XjI4LicbMUml0vXT6cEb599YEuIkuhU,9713 +resources/oci/compute/instance-pools.png,sha256=GY3-PQpIroeoO-e-GSMzGSAsGzHFGXeJp0XjUrIl5uA,10832 +resources/oci/compute/ocir-white.png,sha256=_5iY9FoR9Ms2447oJEKlTeDxlgjLoBNx1WXbgdFkVM8,6318 +resources/oci/compute/ocir.png,sha256=QufHKTzje-mU0ZGb1V_SX-ntNFTT09R1FjhJxs_T2ZI,6701 +resources/oci/compute/oke-white.png,sha256=35NfWicKcM_xen_v6IqaBWlSW3KCpxdBILjF4Azpt4o,11350 +resources/oci/compute/oke.png,sha256=sYBHWSAJLn1BAC5yCkbSbluSJSqng3a2ghtu_NswgNo,12862 +resources/oci/compute/vm-white.png,sha256=FRLwfEXXBrXXKQi3Qa7QwAfvj0fuYfhvy__7p_sN4qE,7639 +resources/oci/compute/vm.png,sha256=K2ln4wcDMSWuFjm69iZN-kW0Q4Ls2qaJ3BaBJwYu55M,8483 +resources/oci/connectivity/backbone-white.png,sha256=WJJQXoRtyhRSiMj0qnPs9dv6LIsRaZHxYOboMJFPrPc,5213 +resources/oci/connectivity/backbone.png,sha256=4V-Sgof0STYOZY4fBjzZXJEShyh0MRqDGWC_i1Fkn1A,5287 +resources/oci/connectivity/cdn-white.png,sha256=V7BDHTqun2_m7t8Ty99QwwzbQhJUuVuHUuQX6WBvSsA,13652 +resources/oci/connectivity/cdn.png,sha256=xDnphF6E-ZPrGIkdzxMM036HxcSSueNR8o4VfhT7I2c,16158 +resources/oci/connectivity/customer-datacenter.png,sha256=zVuJ35anIGcdJhvOfAJ4NpxnDCbeOqyT5GZBxTmlzu4,10176 +resources/oci/connectivity/customer-datacntr-white.png,sha256=muPvJtOHeK-IMcWZEkiFdOKNI7NWJkghpLxm2n0IUE8,9397 +resources/oci/connectivity/customer-premises-white.png,sha256=TBr_coFrMD93mP3gnxkVa1_uhX0E-IS6-BKj6eFplvs,8840 +resources/oci/connectivity/customer-premises.png,sha256=UIYXdoRWlzqqhpih3MVQXU5rcytCQFxXc9cKoJSbKQs,9556 +resources/oci/connectivity/disconnected-regions-white.png,sha256=6Xhoni2h0nIpnwqvXgW9FkZ4pq9cn9SbM9PpIz3RJ7w,10081 +resources/oci/connectivity/disconnected-regions.png,sha256=LdONSaSTzseF5evaH21Bxw3D-cOUAoP2QyTqVbZmV3o,10959 +resources/oci/connectivity/dns-white.png,sha256=mRBSOjXW0QmDbGu3d_BL_xTb-yUNj-bf_e2furC9vxE,13733 +resources/oci/connectivity/dns.png,sha256=jg4qQf-6hTgQmFno_j2x0k25EaF4e0h_qY1if-umfFc,16470 +resources/oci/connectivity/fast-connect-white.png,sha256=h_h7MfAHRL31zXhAo5c67ZrkMWtOYkTHwrgxysTqEAg,11729 +resources/oci/connectivity/fast-connect.png,sha256=j86sGldtXofnoZuh0TkA26sgBg5WBAPg2VVIbjAiLOk,13065 +resources/oci/connectivity/nat-gateway-white.png,sha256=5GMs9wI4JY0iz_h9VA68aS0r3GKU3tTFkjxtiAoGHaM,8406 +resources/oci/connectivity/nat-gateway.png,sha256=sMH23cxeOHErDImvmPVMDL76pnVosx_-b5mc6XNjYzM,9325 +resources/oci/connectivity/vpn-white.png,sha256=7H3pxcdtdju7__qmYxITxeOX6M80Q-Ol2-yGJygFJAI,10114 +resources/oci/connectivity/vpn.png,sha256=iIpq5qipt9GWFOOZ1nDoXLYYVFkfxRE6F0lkTe4x1ak,10985 +resources/oci/database/autonomous-white.png,sha256=alwCd1XP54F4e4iVuxMEJfXb6ji4ikvyW2HcOkv-Q8E,14683 +resources/oci/database/autonomous.png,sha256=HZ_WqN1jdV_8vKAncm6qDOLedRPc6OLgsAaz44HTJrE,16978 +resources/oci/database/bigdata-service-white.png,sha256=4zclga30_ACECGHKXa3mluSI7FHmL8X0g2KLed0J7DM,20158 +resources/oci/database/bigdata-service.png,sha256=1sz3aST-1eW4TuQO3_BeFbzg0iFCjFkzRBzfzMX2JvY,24952 +resources/oci/database/database-service-white.png,sha256=khZfnEQaaGs4ywDI-s4fKGMH6IoPLx_OGSNIpq9_Ww0,7852 
+resources/oci/database/database-service.png,sha256=dpO98JBQNqjpP3fo9bugL-k3m47lu5b56pNTmcV6li8,8253 +resources/oci/database/dataflow-apache-white.png,sha256=UitQ2-pbfO6rKFHbmT_JYEGErUNyZ9ivpHwZr_j6-i0,15619 +resources/oci/database/dataflow-apache.png,sha256=IikvR0iG-_svCYjS3cmvsYGYncOgRQUMWM9zlZ3skFQ,18102 +resources/oci/database/dcat-white.png,sha256=X0ArZmMa9MBr51X7Rh2WmivNrOsvxkQOE-M6p3mZjmw,11304 +resources/oci/database/dcat.png,sha256=P7fbRbH0728MyO0Zc-E0SgYpGqpI6SWJJxo3vv_5tFw,12774 +resources/oci/database/dis-white.png,sha256=76bP99Ro1R8mXqVQvjrxy21Y1FKdANOLwS1W9a8fa4k,18475 +resources/oci/database/dis.png,sha256=47k4wT7svCd3ZhbEdZMBaPwhBiMfcn2Jf4UI_OuvScA,21715 +resources/oci/database/dms-white.png,sha256=zduc6x_w3LWweK93OBdtWz62bM4iVnpiMJyWYjUB9ro,9349 +resources/oci/database/dms.png,sha256=18ME1J_AJvdhOqRxuL2LvIXrVAbKQqO5FFmaKG95nQY,10505 +resources/oci/database/science-white.png,sha256=nyA7WvoDMxqXtuv7144Ap5vtUxvElOLkM6EeYr-OkAA,18922 +resources/oci/database/science.png,sha256=pAWQqfxXO549O08MY-9jK_VFcOnVNrhsLgqYbzsEsV0,22526 +resources/oci/database/stream-white.png,sha256=-HArl7QzD3DZ2iwzc3PnNdPVdUNcNMgChY8t35ykcWo,10250 +resources/oci/database/stream.png,sha256=3M6pyqz0vLNJJgsvsdNP-27Y59FykYZmgJQ0TVbxImU,11573 +resources/oci/devops/api-gateway-white.png,sha256=eivI9Hw1OW9gsr1N7kIXnoYR2zWiP1CO4Sl4h0mNX9s,16402 +resources/oci/devops/api-gateway.png,sha256=mINZdj0-ZU7N67YyK_f-XjfSGNusn9w6KzpN9aJ5xXI,19037 +resources/oci/devops/api-service-white.png,sha256=YI4ESbjuyeJ8ySzgrUpi2Sodh85OhF4z-CfwNs8ts0w,7156 +resources/oci/devops/api-service.png,sha256=VeQAa1EThdPQkoeh0biXxi41LtymPv2UUPdGqwcv-So,7820 +resources/oci/devops/resource-mgmt-white.png,sha256=sMen9mm7kfAercncBxxMFQm4681Ph87CvBWzkLkQkec,10741 +resources/oci/devops/resource-mgmt.png,sha256=CzObor7ZtctF_8RXzE8J8ChKh_o_vm9dJX-qxI2cHBk,12170 +resources/oci/governance/audit-white.png,sha256=OFcvgOCQF9zix-dV7BdPBlHaNRbKEk8at3MXdjDxKak,6218 +resources/oci/governance/audit.png,sha256=N_yOVW-YOLiY-kCLn80zhprH4xdGaW6d34ldtBP1pnI,6527 +resources/oci/governance/compartments-white.png,sha256=Ly13emDf5xQO9iTv4rdeLFmjq5m_hZVD59y-4m_zLDs,11810 +resources/oci/governance/compartments.png,sha256=D1npamYhuMgQM4uqG7uG7vfEJzh-QEYkEZHEIbHGdl4,13730 +resources/oci/governance/groups-white.png,sha256=57CHW7pg2QnrbtkBxEJaRRAqICkl7v8XK3ud6SEpuJA,14434 +resources/oci/governance/groups.png,sha256=q_3LOId-kzB64tmgCbUM7WH-kezYyppg75QDJ0c0uo0,17411 +resources/oci/governance/logging-white.png,sha256=nLKMQzSeVKZD9VwmVFKPeLVwFHl7_z2eUPIANJfd7zc,19814 +resources/oci/governance/logging.png,sha256=IBObYkD3MmPPizZguA__GSKxrsw0JXVR17m9wCpFJqM,24900 +resources/oci/governance/ocid-white.png,sha256=2aVTXsh1c1eECG3chYWw8j_pVGvMcqL3OnweGnvGdZA,12328 +resources/oci/governance/ocid.png,sha256=IVzO1Q7wPXRLTzLSjz0DUJd00mPiUlwGscg8Yw1jx-o,13841 +resources/oci/governance/policies-white.png,sha256=2hj3A1V0ABCyUikU_JCggSSqXeHEbXTaIxU3eZwCBrs,14684 +resources/oci/governance/policies.png,sha256=BiefGCqEygTAtUePllBZRjjJIfkFJyq3ZpUGB7APs80,16587 +resources/oci/governance/tagging-white.png,sha256=BxqbnXME-3CpPe4N2bZuTgMy3dkKnRHskPQKNgWv_rI,16684 +resources/oci/governance/tagging.png,sha256=NB5Wiy89sEaAqASepIWWmtmVgartckGRyPiTBUkuen8,18769 +resources/oci/monitoring/alarm-white.png,sha256=uhB3NHAvQZXATZG0KDP1NKk-HDpFiKgpu_3jffUjMTQ,16657 +resources/oci/monitoring/alarm.png,sha256=wcZupydKUemhA46QauWhT7yTYo2fYFjLe5MHDEZW-Vc,19134 +resources/oci/monitoring/email-white.png,sha256=qeIR0dnFphi1Os-EuoauKvrpL7WccKDLA1N6K8NwxJU,16006 
+resources/oci/monitoring/email.png,sha256=q3z7exct7HsJoVqoND2WhBmlo_sdGLTAAHkkE8Xo0Bk,18188 +resources/oci/monitoring/events-white.png,sha256=Jw83K8b9r9wmeSl0Md4hYTiRpuXJfJ8QyHLy9RAyVxw,12469 +resources/oci/monitoring/events.png,sha256=a5q61yl_NXOWN7kP7dGNlstinC8MSSrH_7JzjohpE14,14051 +resources/oci/monitoring/health-check-white.png,sha256=LeuNiEomqdctXQBsJ5cmLFBzfq76IpaoeDkngGWM53A,7439 +resources/oci/monitoring/health-check.png,sha256=nbTlMWunVQ2tWG25_3n_-7x4caXfOEUO_2q2wV1ng6U,8152 +resources/oci/monitoring/notifications-white.png,sha256=3lHGcpD5Q1PXyucaBb84SxpnGOCOtlHJJVpViYpLdDw,10161 +resources/oci/monitoring/notifications.png,sha256=m3lUzwOgGZjJ1YrPahL6e8KXoTGMQGXRwa22e9acm4s,11325 +resources/oci/monitoring/queue-white.png,sha256=OGeKn15iUY7Td5f_SS7I3jATkaRJbqv5EaHOPaYJI98,8963 +resources/oci/monitoring/queue.png,sha256=1b14GxTeVXsWSeTjLBv6Xmsw7D5d_e8i1h4PShJEaBk,9882 +resources/oci/monitoring/search-white.png,sha256=Bdm2RHjdxvPZj-Vn2Qy8WY7sUhem9N8CGt-hQ1NqCa0,19008 +resources/oci/monitoring/search.png,sha256=QHtyyLPCXHLErGTBGIijepS1qoQDJhtLlfkZXUiH2eg,22853 +resources/oci/monitoring/telemetry-white.png,sha256=HWWQl40CFLDd5Tm9ipr08lJcIP97uvU_HKSuEWzHsXw,11072 +resources/oci/monitoring/telemetry.png,sha256=nPRPEblLG8F9vT7tmD-SgDHyi_inEQIav7PJ4OHu-SU,12185 +resources/oci/monitoring/workflow-white.png,sha256=DfefOLj3xqI6SXtlxIDrfkvmYx4UAzdQsTD9J5NJItk,10611 +resources/oci/monitoring/workflow.png,sha256=eYh-mue5m7iyABJFRd7Lqww5xXuJp8fF0LEv8S4_CJ4,11880 +resources/oci/network/drg-white.png,sha256=1kpoMHV2hTlnv4YxTxdgfwQlMhgm62XHB8XXAISvd1I,16029 +resources/oci/network/drg.png,sha256=BT8-joVA4aAKLKIVd5r_-vkeNZahNL72jJ_BMiENYqg,18643 +resources/oci/network/firewall-white.png,sha256=s819mvjqJmz6oJaKWHOxbUrdCsznNrk_xUXMHxp3kZg,7753 +resources/oci/network/firewall.png,sha256=w933iWST0Y0QA600VN2facGjfrvtrzRvutUe7Ed3mas,8361 +resources/oci/network/internet-gateway-white.png,sha256=jE7_0-HonTk4IUY5OuSBtFNO-ZSM-q_aob4xyEMZyQI,15739 +resources/oci/network/internet-gateway.png,sha256=PHPuhlZmP-7_dektfItiJUIwL2yRY_LO30d8FUKD9R8,17909 +resources/oci/network/load-balancer-white.png,sha256=4F_19CKDDr42yI5NPK3cQsMC3fxwLkjfApZFOt5LbQo,11204 +resources/oci/network/load-balancer.png,sha256=fYtMT9hBiNOWRzgHNCQF1TF1g3412E_5KbE_gqKGTNI,12472 +resources/oci/network/route-table-white.png,sha256=otQ5Ivs0wk8I_FN4wEQSY2cArq-JuNAh1ZgjIiIkxFo,13248 +resources/oci/network/route-table.png,sha256=nQ-VyQO9FutcvD-7BPwXUfSlysxIZbH57Tsjx0GKCXQ,16184 +resources/oci/network/security-lists-white.png,sha256=HN9_rW5ZXkWrCO1XkbbaxJ9rdntJnsO1VPHz6aUKre4,14132 +resources/oci/network/security-lists.png,sha256=NBt0tP9wbtQt2jOdlrR2H74l-DtZjABoYEe8UfPpNpI,15965 +resources/oci/network/service-gateway-white.png,sha256=TEpsjZhMndLzwmSOYY0uqmdXXsZ1_2-K8M661m2Dg0U,16415 +resources/oci/network/service-gateway.png,sha256=pvijdpPwvkxNU3U2MuUT8UAMP49mMYCT4pJPXIGMMBw,18599 +resources/oci/network/vcn-white.png,sha256=cACp-8RbbqkmYPq3fhmZ2sZVtLpDG2prQ-vTmvqMC-U,19826 +resources/oci/network/vcn.png,sha256=qpCtyhR1lLr-nixd2XXP9gO6yl1R-8wwOlrwBmd0kMQ,23572 +resources/oci/oci.png,sha256=pQM6Op-E5WM8AoeCBOGJ6bqWTZ4trFR3L9tCgy_jobc,7432 +resources/oci/security/cloud-guard-white.png,sha256=7DRLpBpfHz_aMxlsGKrrc9csluXqYKa6HtKCumnLvQs,16263 +resources/oci/security/cloud-guard.png,sha256=4UhW43MHhpXUuX1veZbqbvfiaTWzb7apJhJcOpw3a_4,18599 +resources/oci/security/ddos-white.png,sha256=GfPoNAgzCRIeTxfutfgeAC8Thk0xgAXk2HqErp69smY,15539 +resources/oci/security/ddos.png,sha256=CrVyKHfHeCEh39krEO4TJ_hFoPnCefh8eej5ojBi-WQ,18022 
+resources/oci/security/encryption-white.png,sha256=xJAVI4sdX-K3Pc6nYDRVdOtq-hBKTBDSye8g_6-cifE,16407 +resources/oci/security/encryption.png,sha256=skxrnkcfVr2eZiLHJu9LLq9X_ulaTsbkRa7FUUavpNo,18559 +resources/oci/security/id-access-white.png,sha256=gM7t3rD2kUyAZdPRqUlkvMmzswXJ4-IsAiDwdC0S8iE,14961 +resources/oci/security/id-access.png,sha256=V-9RW36eImVYEbR1bGCzfXAth28wv4SYfTU4VtwmQUA,17254 +resources/oci/security/key-management-white.png,sha256=BRBfk_t6-RTlM5l3oCVG-E2v4IA1rQlZ8PQzHp5NurY,15067 +resources/oci/security/key-management.png,sha256=5kDJdQ3Q_JtvEgjoR9KnMffPJWdZg58oOK3ZH_3qdno,16790 +resources/oci/security/max-security-zone-white.png,sha256=03oC_awOyPuxdPCdYD9W803z0iu6O0Mpi8enKmyv1CY,12610 +resources/oci/security/max-security-zone.png,sha256=BiBlAl5lBPAO2eef6TZMuVP_HWVg7JaUZjaaixZKf0Y,14124 +resources/oci/security/vault-white.png,sha256=AnqH5mbTfcqMt-SyyYkfenwfAjA8i4x3ijbAPxbeONI,17000 +resources/oci/security/vault.png,sha256=Vt2Ct_QkyZ_fv09ykjli9N1RCUSbuuaoHE25k1Fizjk,19628 +resources/oci/security/waf-white.png,sha256=7aTVyeWVHR0wkju7RxYUxeokjrM1j6yx8O3gIkpxi0g,16875 +resources/oci/security/waf.png,sha256=4htGQt-y_bSH7BGdC_zOag6F1If8Ps2E_LN4rr8atfM,19448 +resources/oci/storage/backup-restore-white.png,sha256=_eQ7u_sqMgxPXMkSZaYJjU23c4jSYZdGjzMfZjTtuTM,15872 +resources/oci/storage/backup-restore.png,sha256=SATVrYwbyPfCmT-ggQlmsjYGcYKhpQeSekAjCOVrSog,18605 +resources/oci/storage/block-storage-clone-white.png,sha256=XK4zXpsQ3sR-hyCrImQCvoZGS_MJ_22-VbHPI9i5AEg,8989 +resources/oci/storage/block-storage-clone.png,sha256=apfCNai5Q_vPXCdaF3e9Qc4cqPWFUiWDY2UGCEbbinU,9979 +resources/oci/storage/block-storage-white.png,sha256=ovH4xcUz_TuIP-9QplfLgdgKfJJRYNc7sHw3ch_w6uU,6672 +resources/oci/storage/block-storage.png,sha256=-4uSp1Xu44atIszMeTbBlVq8O7seDWJZp8VYUXhI3ik,7223 +resources/oci/storage/buckets-white.png,sha256=RM50lsLF67zy7-4Z_CcT3XQn7VpjnM9Ek2V-bNpa8AQ,12501 +resources/oci/storage/buckets.png,sha256=Iy6rreJM0Fm3fCyFCHQijw5DisncjzcVdx2c5UdlQQs,14665 +resources/oci/storage/data-transfer-white.png,sha256=5zB1hLb-SeEzq-CJwKBJPC3CkfWWAaJ2vjzxAup2SFk,14241 +resources/oci/storage/data-transfer.png,sha256=41_sOyQJ13hyctovdoq0dedODIg5aWhrS0RsTZQ4tw8,16293 +resources/oci/storage/elastic-performance-white.png,sha256=dOm10sI6vIVFNPS90XrFe0MX6iULSkhftC5AtkYWtHE,15329 +resources/oci/storage/elastic-performance.png,sha256=nVk1k5m9J8MtM92_MwOfaUmTA_fds-SlyE3esIGsrSU,17746 +resources/oci/storage/file-storage-white.png,sha256=6aDVkev9QxjDXvcGMWP80EDxaZdQ-m6CKYBPnmpP4vM,6999 +resources/oci/storage/file-storage.png,sha256=6wUW9YVzHxYlWJIUVjVhduhbSqZfFdhOv2hyaoSejqk,7430 +resources/oci/storage/object-storage-white.png,sha256=JhUCTkz5su9EINZVaKd1Gm50UftnHVWgQP-CxPDFiac,13145 +resources/oci/storage/object-storage.png,sha256=Ue2JBH18i6Sd5-Jf6CUk1IlV0GqERhGIjkkf_bVf9Ts,15034 +resources/oci/storage/storage-gateway-white.png,sha256=GbVcn6YVREgW8tmVuaxziceQND41zETU7I22nBDoxzU,13503 +resources/oci/storage/storage-gateway.png,sha256=VrjM1oDqeC82KOoyx6oDXLp8FDaK0Oo4zTw9ZNWwEKk,15661 +resources/onprem/aggregator/fluentd.png,sha256=2CBNUu9wXhRoCMJVPs6cb31zj-Jl1ehBXwwwGSwNTLE,25142 +resources/onprem/aggregator/vector.png,sha256=ClIL9zqQ2whoKuEGTw0d3P5qYZcOMAaco06K4l9Povg,7830 +resources/onprem/analytics/beam.png,sha256=vhLhFob5EOoo8U84B0ES2m7aA9K7OrkqIz5NIfG3eYw,29758 +resources/onprem/analytics/databricks.png,sha256=mLZZLCiDZnK_7xDwC0dbOfLAHaAAcRUlYoe8gvvP-SQ,4471 +resources/onprem/analytics/dbt.png,sha256=wdB8CBBUDIlRvCBuIPXEq3QuHVLFePOnkSb8TciG0Nc,19617 
+resources/onprem/analytics/dremio.png,sha256=bR4rqm37ovkhQt7KtTzhIuKzYtEwXpVGH3cMFNsUDq4,31687 +resources/onprem/analytics/flink.png,sha256=bs8Sxo6Rvrk594Pftxgcm5TXgSNRDrmkd7UVxVle5SY,90670 +resources/onprem/analytics/hadoop.png,sha256=3BQqnN_cqpe2sC7PePsHKWYyfyeD48kJJUSuoJjeA1Q,46970 +resources/onprem/analytics/hive.png,sha256=KJgkCdSMhqOyA0z5eF4HLCu19Rd-fNRtgoa-Nwpg1Wk,26950 +resources/onprem/analytics/metabase.png,sha256=0U3qliRCcCsnQRTvNT0-r7XuPLZ_xuczgqkMC3VSm7I,21325 +resources/onprem/analytics/norikra.png,sha256=v46LXZqYKEn-Uv6_6DgcyfBKEGkfK4dpikqyt4sPAEI,17638 +resources/onprem/analytics/powerbi.png,sha256=dYMXJeAVBfjzAZsSQhPkM8sNL_vNaQZI7JmE8C-xcVQ,5725 +resources/onprem/analytics/presto.png,sha256=G9HBXacZN7Db3_vyYuvYB9xCc1Q01zNkGQBxv9RTRc4,13575 +resources/onprem/analytics/singer.png,sha256=Lq3iO43qD_QCHWNgKd0wt40yr-aYnFDf323VL4mN2RU,14848 +resources/onprem/analytics/spark.png,sha256=5LNzbGqMOlzR4ljeV8bweKOMRpvmBTtyysXqKc9zH7I,23872 +resources/onprem/analytics/storm.png,sha256=RJ_-qEBomN5sb0fD-YI7l2-fgLULj18RmErujU8poNc,11894 +resources/onprem/analytics/superset.png,sha256=L3kXYx_8tj2wblW0wKO716SI_5-Cmo2IeYXQUTcDjoI,12715 +resources/onprem/analytics/tableau.png,sha256=O3vVTCJRUOATrVbg05181CwBlv70bytqo0rUUMNF1VQ,3138 +resources/onprem/analytics/trino.png,sha256=8ps7iGMdaaIBJYvFHo8B1S7l8ED-AlR8xUj7lIZvCoI,44239 +resources/onprem/auth/boundary.png,sha256=-YE88o3x40qZVDf1XW3oy15iEmzeWT5rWMk2NkXNXHk,11389 +resources/onprem/auth/buzzfeed-sso.png,sha256=AADFlfRn2cnsXw4qDvNOHA_9w0z7SUMWEvsljKhN95s,18611 +resources/onprem/auth/oauth2-proxy.png,sha256=aBzpiAJ0kjhwQpTeet9JC5MH0UzX64NgBz9A2LLTA6E,12655 +resources/onprem/cd/spinnaker.png,sha256=QQKZTYTQha2voXtW5eQpIDADMk5e_O8cbfTOpVOPkuI,40938 +resources/onprem/cd/tekton-cli.png,sha256=qqKBlVlPOyuZ_q2P2CqPIn6q4CmgipFNdpybAKWSDKI,36375 +resources/onprem/cd/tekton.png,sha256=XC_n9l9QDLnpHxGfUupqDQpPMaSk8ioAP742dzBCR84,78504 +resources/onprem/certificates/cert-manager.png,sha256=HcVe3De-sFNloldaeK8M0cEOSBuyN4U3Bq9qAS5Qta4,32453 +resources/onprem/certificates/lets-encrypt.png,sha256=kld9SnTSfnLIYOpInoCRHE1IrmDw6iWonUIdI6Yr5cU,5160 +resources/onprem/ci/circleci.png,sha256=kbrc_fbPzdQlstcAVhEanJ_TmoD1h_9e2qMcYNo57KA,6244 +resources/onprem/ci/concourseci.png,sha256=HGzkDmwKNYfr_eI0CR0PALmytZNyBPS-u_1Mc_BdKAQ,17700 +resources/onprem/ci/droneci.png,sha256=qz-aaYFDBZXGUkd8xyL7kti3cCbb7BUQhNslk1EevrE,12080 +resources/onprem/ci/github-actions.png,sha256=YycH8s6HHz53Yqm6q9RaKgrXpz7oKlUGE-gfWQyWVsI,39261 +resources/onprem/ci/gitlabci.png,sha256=n82kweCCcFjqeCYS_ksQldbKnRmVSsf7c4rlQj3taYU,17108 +resources/onprem/ci/jenkins.png,sha256=McgR0yLHh4izWmXfytnxYxwk3syAkj8wIEwEAUZLkVU,31763 +resources/onprem/ci/teamcity.png,sha256=8pswxnvV_VaZhyn2GhYeMpLlXB_k44ljvCA9uvgD_RE,17112 +resources/onprem/ci/travisci.png,sha256=X_mDAqXKausMnZkBF95HXR5goHPG-FBlzvkzDAtlKfg,39602 +resources/onprem/ci/zuulci.png,sha256=44VN1RExVad6bvNKBqNeQieEO4Lzkw486L6iwZlQKRA,6363 +resources/onprem/client/client.png,sha256=62_v_jkAQIETIRWWP586gIiv2QPz3aaTa5NuZ9h-BEU,1860 +resources/onprem/client/user.png,sha256=_yR7HaFO4LvQAiU1BIsF0PmMyvMqEmbH-9rqudFR1bc,3748 +resources/onprem/client/users.png,sha256=Su8gFAXOfcm8zbR5FsnIJcPqZMia1QTurbu5k0eOxec,5205 +resources/onprem/compute/nomad.png,sha256=zhQmZE0g6cax1IfJsbOcBuGPHFa7uo_-iNnCvs3vEuQ,8786 +resources/onprem/compute/server.png,sha256=oVAgNrypgfB2RZ944dTQareJ39EuRRpPZDiYoRuQy1s,2328 +resources/onprem/container/containerd.png,sha256=NU8i5EFz-qz6yvZi5dCLSfZOtwi7JBEgclxFWKeWzbw,1245 
+resources/onprem/container/crio.png,sha256=nchbCjW3USSYQM4BkT88J5_MyJ4c_6TEBolRTNVhOs8,29170 +resources/onprem/container/docker.png,sha256=yy-0f44d9UmYHklYs7Xg1JrVCvc0ycuE_Xe7BH1Jm4U,11250 +resources/onprem/container/firecracker.png,sha256=g6iYv26fo-BNoTQkEnwQFN8JpMrsfag74_qFvkODcfU,43918 +resources/onprem/container/gvisor.png,sha256=IOfU36QOq6E5V7frwXtQM_l07mPRxTkukun9kCNff98,38731 +resources/onprem/container/k3s.png,sha256=IWMnZfMXmoG7xi6iuY4hqR6qR2GSNNwQnEnSbmiw8nQ,4961 +resources/onprem/container/lxc.png,sha256=OfVkLXKRzBxF5DK5UUTQDW4GWrINQk_zWJP0ILCY2Y8,26354 +resources/onprem/container/rkt.png,sha256=ejSyU5tHPHw07rcR_yHHioboLJQDcUsawFIMMB1QcJI,11239 +resources/onprem/database/cassandra.png,sha256=XmnpyQNAMv0We57R9qNjtWX4HT7eHUCHuKZHa-pK_Kw,26289 +resources/onprem/database/clickhouse.png,sha256=qFqnFc7hkgr1l_cLkY1MguUKVCXZt56hM-u23Iw1u_E,377 +resources/onprem/database/cockroachdb.png,sha256=-HYZ6XPvT4lcE5VYHYBP-sxwIZkC78VW-qdcYCp5754,11305 +resources/onprem/database/couchbase.png,sha256=wuDbdRvMkO8bz7IXhdKyNpUIDz73e733dkBYj1gJHJU,12091 +resources/onprem/database/couchdb.png,sha256=T7EpjAJF7UtjA4eb3n0VhW2_SNZbxPYjvKHFmmR0jrk,5993 +resources/onprem/database/dgraph.png,sha256=rZf-3vOi4vptQ5wQzoxhQT4kPibcWkohVuqE847A7Hk,139181 +resources/onprem/database/druid.png,sha256=a3SqnlIrAWJGrgDf0Nsrn6zEXOenWbDrm5sa0M9z0YQ,7272 +resources/onprem/database/hbase.png,sha256=kU33dZuu7xpdyJgRjoJZfplidvEnjsb8PPfYHrK-0rM,8565 +resources/onprem/database/influxdb.png,sha256=ksfTHdc9boS3pM9i8tHybdiaEgZPbyXTbM_oCLVa0SQ,24051 +resources/onprem/database/janusgraph.png,sha256=ccz6afOwSW3TmQNsHpQLM1vGViUYlePayK4cSiheODY,7610 +resources/onprem/database/mariadb.png,sha256=2R9rrj6P2BrNCSlEC5Aj607rNTob9Fg_0qK2KBZWrGE,8405 +resources/onprem/database/mongodb.png,sha256=84h0SCw3ctdQ-rlrSc_Kuvgp8GKU7BKle91VfHPg1Qs,6532 +resources/onprem/database/mssql.png,sha256=-BrZx9Epg8PRW8YcVJREN6f3_MZ3MrT8hm5OI0hOCKA,36305 +resources/onprem/database/mysql.png,sha256=d7Otuaab-V7iFOiC0F-mGoCalR7undsTz42fJNfLDt0,52252 +resources/onprem/database/neo4j.png,sha256=6Nh96NiMp0ENW8bLCbRkSWUhHEhHKW9-eYOn2FMRVHU,17358 +resources/onprem/database/oracle.png,sha256=68Rf_sPFmFUHR3_JoFIcfwJCAavCNxbgFjx_qKeWwYU,8685 +resources/onprem/database/postgresql.png,sha256=PtNjePt5uX6LoGVqjirfVaXku6TnPnuk6dqIir3j4VQ,37863 +resources/onprem/database/scylla.png,sha256=FrEgQDeKsr-QtYItFniwnPvgruapHIDqXDSJdu4Dbqo,130308 +resources/onprem/dns/coredns.png,sha256=8_4jVvGb8M1tMCXrJkwJJh7r3ElrVenfYk1AxO8hf1Q,11315 +resources/onprem/dns/powerdns.png,sha256=Cy8AWflB_Uer9W1FqGgnZPrtcers6jgAn-0LOgE2E_k,33870 +resources/onprem/etl/embulk.png,sha256=ILdUx6RHMKTVlNnzGd0Jyz8bQfz9ikqZPLzVs30P5WQ,37898 +resources/onprem/gitops/argocd.png,sha256=MejzB__nqvywk1eMIN4S30caSTKLFuvBYvmgl2gHPoM,23139 +resources/onprem/gitops/flagger.png,sha256=QHKF3XWS4p0sVYECUTNhQbFj4TQ2ZTkolwvvXcMHOnM,27435 +resources/onprem/gitops/flux.png,sha256=b_kqNiUa3nUtj3AAJ5wz-f271_9SQ1wGU0uIj9PSl5I,30310 +resources/onprem/groupware/nextcloud.png,sha256=GVqcL330u0oQGff0mRqt5neiu20jKyhSqsgsbZvRgFQ,12291 +resources/onprem/iac/ansible.png,sha256=Dq-2U66_YTV3Od9nkogS_tR6nrxf_OpDm1rxvnEQZtE,11434 +resources/onprem/iac/atlantis.png,sha256=LDUQBZ-HYNXq6izFy0CGdW5JcbjbwRXKZ2YiDC2oc8I,24271 +resources/onprem/iac/awx.png,sha256=RZzFrSOfq4LI5ooA9G8Lcu_S7Cormq3G-8WgaPyDhE8,31884 +resources/onprem/iac/pulumi.png,sha256=WbAQ2-umUGT77xtaBNnRjebkG7cdW-VGBRcJr_iZ2Zg,30663 +resources/onprem/iac/puppet.png,sha256=yqYtjy-BkQfvIWxCKcYb-lYsbsMShHNAAdDfmpJ2BLw,7857 
+resources/onprem/iac/terraform.png,sha256=wqir0FE-vX7MmTKBHqqlMwXsmhMb7dQFGB2tpX9rVT4,18336 +resources/onprem/identity/dex.png,sha256=W6gZur8vfghCGuVsFPMTwOgE7ZJefZUqmOvz8oyJEb8,11028 +resources/onprem/inmemory/aerospike.png,sha256=-Kyf8tKdKLH-YxXlOPOQN2z-RTNhPBFhOEuizMPvqZI,14732 +resources/onprem/inmemory/hazelcast.png,sha256=Fd0ttv1we0s7e-NOWGuA58i5eBr9IUlQzSlporkoUqs,2935 +resources/onprem/inmemory/memcached.png,sha256=DcoPO_Ne2ZRLiKcXe3DTfRleYvvM72k1-JTORyi8psw,15555 +resources/onprem/inmemory/redis.png,sha256=ihSBgPVW8wasIyrCu5jpJ4_k3fKr7c2JKpaeEKnswvg,12611 +resources/onprem/logging/fluentbit.png,sha256=nNez7of2v8S-InuE135iERm70_FKRwx9RGLTTfnR0X4,24726 +resources/onprem/logging/graylog.png,sha256=zWPr6f1P_lFXTSN6vJhIl6_hcVOwgabR0rdSTDIAo9g,20364 +resources/onprem/logging/loki.png,sha256=YfL_saxrtPFS-7s6t13OkMASSuso6bXAlZPgHvLjfF4,19766 +resources/onprem/logging/rsyslog.png,sha256=B9xCCg40Xrmw5PHIZMcKfR-btMbcs37l_Wy-RwMWGEw,12543 +resources/onprem/logging/syslog-ng.png,sha256=g4db5E-yO1g1OxYwb6_tVWKDMnXq44sCSz6_G5sQkWI,15150 +resources/onprem/messaging/centrifugo.png,sha256=kLJf53Sc0xcx9dhljAWriHftmaAZ6IpCD9VdAM9Hhro,15364 +resources/onprem/mlops/mlflow.png,sha256=vNmvC5r5ub_XVaW68Df3ECUOk2eCDRkHoyETZTfnigI,9707 +resources/onprem/mlops/polyaxon.png,sha256=vWARJu-tEeuAjz_Pjeq65b4MLrEfHLTd9myq5v094QQ,21343 +resources/onprem/monitoring/cortex.png,sha256=4tlOOEJp0iiy7n32KdQB6knmlPD9qrrQw73oRbS6epE,17704 +resources/onprem/monitoring/datadog.png,sha256=eI97m_Uh5pxnN4QZlJFHFWUgof4uKdx1I9GkkBDRVgc,42344 +resources/onprem/monitoring/dynatrace.png,sha256=7ylL3QqEEIwpEFT8An7k51Y-nVFqTg0SH3dB6PPxGp0,19210 +resources/onprem/monitoring/grafana.png,sha256=A-SyXnoIRby_evZ7VmIE_Jn-O0xYG8HuaVuQU1Y7lcE,30223 +resources/onprem/monitoring/humio.png,sha256=GSXzaYpEjVvQxV5WEwxC-ao_UU4UsfYG0mWyP_GMD8I,14629 +resources/onprem/monitoring/mimir.png,sha256=w8Hzl1WVn3clKBkQgVAcWlPeyeiPo8X0r2vmjdJ696E,24363 +resources/onprem/monitoring/nagios.png,sha256=RZZseC4HAE-mkV9VO-otdtyp23iNZqRbsO1IDFeBF9E,7292 +resources/onprem/monitoring/newrelic.png,sha256=chIF57ErofzGWzmKw20gZFKTJI5OGIZKT0TVQ9MSOfU,25708 +resources/onprem/monitoring/prometheus-operator.png,sha256=xMJBpTFOlEU5p_dQfJSD8SNiTbwplOmOSXUrVLvqlWk,26076 +resources/onprem/monitoring/prometheus.png,sha256=AT080zsPxmAB0Vf-7IG9LPVzLWAnK6o_HVcOUKqq_Qo,7689 +resources/onprem/monitoring/sentry.png,sha256=xYuYIF42QCxwknzjuTGGb08YHwyrP8DN6TCqa9qySyo,7997 +resources/onprem/monitoring/splunk.png,sha256=uPqV5HQVy_ib0ZGPU6rN6rRUrfZESFYPNrQqebMIkX8,13691 +resources/onprem/monitoring/thanos.png,sha256=NCau6BfyS2N6N-g4kWY00drYvlEe3yJJfAhFuCPpgFA,3244 +resources/onprem/monitoring/zabbix.png,sha256=5LRNp_pef7hByQ8IEvxp5ShwJ82nI5EOvj5HsXg6etw,21120 +resources/onprem/network/ambassador.png,sha256=pVRz-w3N2ECsVTfhkbjSvqDT8AQnMPWjTJX07t_XNHU,19548 +resources/onprem/network/apache.png,sha256=5xzo6g1XhUt1DY2hg4S3RCXGvaWXuL7Dfd_iCqgIVW8,18106 +resources/onprem/network/bind-9.png,sha256=TLyQz_NN_OwA_4uBMbpn2sh2FDF0oNNmuPZzBlZluao,12580 +resources/onprem/network/caddy.png,sha256=7GJ1cJbau1Z5YN7Sb_i0KeicwxvtkOzff4py36DdZVo,10830 +resources/onprem/network/consul.png,sha256=BbMKVJ8piMI0dfBy6s2AVSg5mX0imEpnS2WDy3LwH08,12090 +resources/onprem/network/envoy.png,sha256=LQ46jAB8L1LQxUSGvjhCOFnIXg6jY63INDhIzkcpTJ0,13441 +resources/onprem/network/etcd.png,sha256=HruYTKrtACrbxgf3HckbXqaRQQW15043iTo0FeiPHIw,11111 +resources/onprem/network/glassfish.png,sha256=VuwyDk8sejqb0pA_mlbaXKxNXpmRi826cZuiFfq1-jw,29777 
+resources/onprem/network/gunicorn.png,sha256=7a_cK30FE6b8eSDTBwGZYzOl1SkMFbUpGBTNkF3RlqE,15137 +resources/onprem/network/haproxy.png,sha256=jKnXLoaOuQSQqqx5k5cdHfCb61fYrMqHka4hEzNhJZA,31874 +resources/onprem/network/internet.png,sha256=XhXDYkNMiEM3VQhw2FZIHEriYiUsnTMvAvm0a7epw70,3731 +resources/onprem/network/istio.png,sha256=xepiEN5yeWRKIjT8HGb-Gtx9F-OigegSrub0os9HV78,3422 +resources/onprem/network/jbossas.png,sha256=QcAn2ERgmTIG-AtuwuaH0noTr_sPP_y86PsdDjyv1G0,12040 +resources/onprem/network/jetty.png,sha256=ExkptozTOEVFG5R9WFi11wpaOvex_fzboPLjk5ODFg4,18142 +resources/onprem/network/kong.png,sha256=U5OOJu-x8ly8dEL-9bS-PYtgHX0QqcT4FQtoKDoIcsQ,8784 +resources/onprem/network/linkerd.png,sha256=nY8Ad8m9O16rUSRM8BaoG_6cU-JRcuz92wTkGDZtoKo,11554 +resources/onprem/network/mikrotik.png,sha256=AX3QXpbBHCJ5JBIzUv1C9VrXqgXmkI1o3iaOaKE3IvE,6164 +resources/onprem/network/nginx.png,sha256=adNScgJL_XIQVhb83pq_YfeNKOpXe2YqKQ-269TMsYc,14822 +resources/onprem/network/ocelot.png,sha256=WnoJIVng-AISJcFWXT_g80lQysb1klEgb1RB00hVEGw,45438 +resources/onprem/network/open-service-mesh.png,sha256=sqjhNGyFqOGaeqlRTJh9KeB2kYwM95MULhpXkqN-syU,21752 +resources/onprem/network/opnsense.png,sha256=ZiKl7wxQMazpaJZTl_lFTq7cykkBR6AAoyJ8cL2YPao,5770 +resources/onprem/network/pfsense.png,sha256=5egBIFcYDtie61_xk2uxglwPbzcwROn3bKrFNBIpQEM,9556 +resources/onprem/network/pomerium.png,sha256=5bkhCufSEtuPJIoEgcDQZzDdOVUrYXDmZHKeZORMLFA,25201 +resources/onprem/network/powerdns.png,sha256=8IRbewi0yO6IX-DsUvWxKJukfoHR-2OXniSCMdsk6YM,5191 +resources/onprem/network/tomcat.png,sha256=9Tk97xsjTqmN6QHM7z5FzR4YyfEbBpg4HKJumoljmzw,34636 +resources/onprem/network/traefik.png,sha256=xJCL6Q_zbGyC19Y9adzH_lBB2bG6OyxLTVV9nT1pYgA,22093 +resources/onprem/network/tyk.png,sha256=h31XYOP18E_NFe5A6raa1pCQVKirBUMw1OuynTr_waI,6151 +resources/onprem/network/vyos.png,sha256=zDUb8jCXcwZfZyGJz4YMoCIjfHM_jsIdTg-8tBZ2Pqk,10555 +resources/onprem/network/wildfly.png,sha256=sU_XrSlvyt3qYeEQy7SjTve-_EC5HmSPIqADgfacjws,14382 +resources/onprem/network/yarp.png,sha256=yOft1aiHz9A4JnTrfsHFrVWxJ78XaipL0o9kYZYZLjo,26466 +resources/onprem/network/zookeeper.png,sha256=Dhgt8eRRmuHrV3_AEL9IMzttU5dB8QdaT91GSLJa8HU,26639 +resources/onprem/onprem.png,sha256=oVAgNrypgfB2RZ944dTQareJ39EuRRpPZDiYoRuQy1s,2328 +resources/onprem/proxmox/pve.png,sha256=r-tzXTZQdNSsYPSk97sFR5tSp_t3WkZeq9pZpuGcL10,10193 +resources/onprem/queue/activemq.png,sha256=0Cev0mNcYR1CKPD-1y1QcHGO_2302j0S0CaSCB9TO28,16241 +resources/onprem/queue/celery.png,sha256=ZwWeQlkDla5vM5rux9lBNZ4hpuDqFkb1nFCFRbx-tGo,55251 +resources/onprem/queue/emqx.png,sha256=7Qa9_cFjdHfuXe-uOgEa71MTr9vFThR6mAeCovLVrXs,5301 +resources/onprem/queue/kafka.png,sha256=pr97wraiHaf22llxCSSbru5l2TszcBic50e8Q3BMXUI,6560 +resources/onprem/queue/nats.png,sha256=qDPBVQBqclkJNtR0Jn1iY9WVjpmn-iVgCWk5z_47JVQ,5539 +resources/onprem/queue/rabbitmq.png,sha256=PyT8JvvikUz5GmOG5Dk9BnhlKfrVX7GYFMYt0SBEH-Y,2260 +resources/onprem/queue/zeromq.png,sha256=3nHd2MQ_HcdE53mtY_B-xXJhKkh4q3HfMEHdtc6tTkI,6380 +resources/onprem/registry/harbor.png,sha256=qLN-_IqCcpbD1tRUS9C0tWZzXtp7i8N2Pc-71xVs_Ng,27245 +resources/onprem/registry/jfrog.png,sha256=OdYghXw60wLu5CMZKpj4ypUPtlvsShpEtSPHdbf_Tww,26503 +resources/onprem/search/solr.png,sha256=p2WX6isDxA-HkgMhfbaywxntcnOGfGNfYIwhMuPTN8o,15542 +resources/onprem/security/bitwarden.png,sha256=XJEl3S3Ck8pvsQHL_wA4WVDFUDdx_8dqyv1DU-0GcYI,3288 +resources/onprem/security/trivy.png,sha256=EGd-2AJMxkiqUOcOXeV25f00x0U0jHPGOyo3MKJXeuQ,11056 +resources/onprem/security/vault.png,sha256=FfcH_XjCGOdVrgpUBAedAIeUmVLhN718njRMo7uyJPk,7000 
+resources/onprem/storage/ceph-osd.png,sha256=UHbhIjKjdL_PWk4JpCGxh_DOWqWhVN-Y1QyNxX20jYY,23920 +resources/onprem/storage/ceph.png,sha256=PhIanHmGV71hdPfcDXxKorH2XENpYjZXOI8gc-7OMro,15385 +resources/onprem/storage/glusterfs.png,sha256=ycmW9PIhd2bUi8y4DbZ3IyfcjPZOd7eY0ILOT84GWJg,38742 +resources/onprem/storage/portworx.png,sha256=nSg2x39QNFO9pANqG6N8REq3a309pF1TgrQMSidEWL0,10403 +resources/onprem/tracing/jaeger.png,sha256=DDRlU4ADF-ht9v7zvRIQ6UbNmWEntQ5Z-cVAgIJ3xyk,22325 +resources/onprem/tracing/tempo.png,sha256=Xz68G2TYgasiyug7i53Rk5x-zuWorpnKCo5Zb8V2yNI,15492 +resources/onprem/vcs/git.png,sha256=6M2WQDmXW0AYe5nlBjLKPoQ5hf5QbClB-WJUu1WQt6E,2983 +resources/onprem/vcs/gitea.png,sha256=rlF7T9VQn8jOQ1SIvAUu-X-a2i80IXgRAQIsCY1Jby0,17896 +resources/onprem/vcs/github.png,sha256=Bn_iWGK0Ap2ZtbIu6gszO___KSkU4GLWBWPNTW9AJdE,19989 +resources/onprem/vcs/gitlab.png,sha256=PFE7azkMEfQXNgmDIDU9DV-rGnIWy33afgxUKQFj8uw,6947 +resources/onprem/vcs/svn.png,sha256=ZV0Nc1BcdXDAjFzrNqECITTeygg57rU359xKF-kMYHM,12016 +resources/onprem/workflow/airflow.png,sha256=M006kqdwaT2PAOFGet2stkGPYnHu8imiIFLmPTSPqBw,29999 +resources/onprem/workflow/digdag.png,sha256=WF2-A8n4PptYPz8CKNg9IhEqaenToZYvBVxSO3M92uo,54041 +resources/onprem/workflow/kubeflow.png,sha256=DewANKgmr6A2swsEjrmnQo1MPLFoI1UTip-13Z7muwk,30494 +resources/onprem/workflow/nifi.png,sha256=S-2ASn9aEwyrYhdfYlbxi5ioWVIXyWwNd6K8akqHnkI,9238 +resources/openstack/apiproxies/ec2api.png,sha256=PRHDTrQ9yJpAGMR4jbGzCADk9Uo-0yRh45zwscYEd1I,32161 +resources/openstack/applicationlifecycle/freezer.png,sha256=zvEpQzsVaSSpOqXC0aTZ-e043G-rC-5BmtpE2OEFRU0,24459 +resources/openstack/applicationlifecycle/masakari.png,sha256=fGQT4nlX19FQwdmJujhI6CJOKNyOBwEgcJxhKUoigEo,22456 +resources/openstack/applicationlifecycle/murano.png,sha256=pQEVtOlHc9Uhpe9DjNaX_Nl0eqs6YWzBIJZWYyOW3DU,23461 +resources/openstack/applicationlifecycle/solum.png,sha256=ou_M1GDoMKVpf-PP1JLHaMpbUC0qrcRAiKZ2eVk0MQQ,23924 +resources/openstack/baremetal/cyborg.png,sha256=KpkswTjFFpGswRYmP69ZHnAA_lOc8tPn05UtWO71xTk,24443 +resources/openstack/baremetal/ironic.png,sha256=203GW6BDz9f6L7p_YiSfCKGafBPZy2MqOJee1HOcZ7M,33163 +resources/openstack/billing/cloudkitty.png,sha256=UpJBp2VsJg8gc8yHz-HlN-kNlyCz2MQ6xxyh9x5k120,39258 +resources/openstack/compute/nova.png,sha256=waRMzIM28wYnXzdd3ltiTyTPJFsgw7NIaFhEJUWKNgk,29449 +resources/openstack/compute/qinling.png,sha256=ms_Hp9yiOyRgruq87Moi0O7lmjvUZNn8zUIRxf5VIQ8,29380 +resources/openstack/compute/zun.png,sha256=3TtQyfDnBzlSq5yqXlx0WrWw23IDlkR_OeCkiXjhiOo,26256 +resources/openstack/containerservices/kuryr.png,sha256=-jTmpqRokYCWIsHLEbkTq2GeavWce3sM-XbVjOYl2A0,13845 +resources/openstack/deployment/ansible.png,sha256=Mv-ChCKmnrnGfE4Tf6IYwlJ29WGF1bL3nSHPRPZXtNQ,24132 +resources/openstack/deployment/charms.png,sha256=SgMJN-B-BcMQhhQzQpbYIUwCoiTW7Q8yXAt1S9Jb6lI,31608 +resources/openstack/deployment/chef.png,sha256=vsHynkpfovIz6nEHSix2Fyn_Kj1KGGTKPqZlm2LWOTM,19570 +resources/openstack/deployment/helm.png,sha256=_1QhUP9SDVBOX2EW5RHh67c7GjjttpupEvkLUEGFvlw,17450 +resources/openstack/deployment/kolla.png,sha256=MTauVfkcAFRU_KSk3g_GKJaok06Aim2_DKVNL3kMkaE,29103 +resources/openstack/deployment/tripleo.png,sha256=x5Bq2CimfrHpKcKSOc4_ISrEvXJfOY_YBDwPNe82MQM,41234 +resources/openstack/frontend/horizon.png,sha256=lb66avC05IByPHLFBy8x1sOqQLBY407UWNZ97iaeNl4,33870 +resources/openstack/monitoring/monasca.png,sha256=2jlhxtd-xYHZdiBsI9Xk01KfagwlXX8rg3U7_yH8YBs,24616 +resources/openstack/monitoring/telemetry.png,sha256=NtmYoYpOXtUY26EmCLpRwa_KeY4MEoB9U7BIWzzGNyc,17726 
+resources/openstack/multiregion/tricircle.png,sha256=YbdmwEMpCghq2EkZYu60KorXbTH76EQPvlgNPC6cuAU,22388 +resources/openstack/networking/designate.png,sha256=jpaGOboZaQFTGg4U9ACrobtEeoLJj5-2hJcGNxeaoEM,12215 +resources/openstack/networking/neutron.png,sha256=8nh8zIgjRMsBL6_ohilE8kxvNXfvythvFtNZz4h5q4E,24731 +resources/openstack/networking/octavia.png,sha256=NdMmrxEZyuv6jJTnxgJ6hhX_SLM6L0UTVAD64f7nS0Q,58796 +resources/openstack/nfv/tacker.png,sha256=JZlODxeudvdQjgBHPqZp2ByUNdhv2Lhvb3E9e4nb-Sg,24034 +resources/openstack/openstack.png,sha256=1NBAEfopreFqBMhB5uXJITG00R7VRvLS8KCqR1HtEqs,16042 +resources/openstack/optimization/congress.png,sha256=UhOBQaKy2TKg4DgdungKmNUxZRkdjgXtOFga6vdHIEU,17139 +resources/openstack/optimization/rally.png,sha256=Q6-aQNmF3p0nq6THajfcnTxj2Oriq-J9BNkogeLGVGk,20286 +resources/openstack/optimization/vitrage.png,sha256=g4VdGrED7rhmdgjstpgzzbWbNLpTbMRsQ5mBGLuFDnU,20257 +resources/openstack/optimization/watcher.png,sha256=C5OPSkNh3UhN8KE3bUjIMGeAPpuJwxlIN55ZzvRgGNY,16956 +resources/openstack/orchestration/blazar.png,sha256=8YQIpKJkHykp65jb2RKJclrtDEaGVm8g8Wp05U06wSo,26939 +resources/openstack/orchestration/heat.png,sha256=Eek4nCLU6U14LLHa23UiouknAeReIe-S3ZYvBC1fDw8,17967 +resources/openstack/orchestration/mistral.png,sha256=Q6tG0KMJRDCsDNoVoCuRVfvDH3VA5XjBd0vRm3FWMiA,18809 +resources/openstack/orchestration/senlin.png,sha256=g1zLVQpQ5w6SUiOcNS3DxZe1zbKCXLwn9eGSih15M7E,36506 +resources/openstack/orchestration/zaqar.png,sha256=jMCtORS-yGksZPUZI2Ccs1nGFo07y4C-JBOvRZNWqZ4,18895 +resources/openstack/packaging/loci.png,sha256=5ggZfikmSqRb9mu48p9F4PhudMvfDb2ONDXxicNZeoA,17734 +resources/openstack/packaging/puppet.png,sha256=m1QAZ78EU9ghFrklgNo7GOdZOGQ9LuDi43W_D-4KV98,21866 +resources/openstack/packaging/rpm.png,sha256=EWuITXgBtKqYv-dgFermg-n4onv0535SA8tLEmEJF40,21395 +resources/openstack/sharedservices/barbican.png,sha256=A4pfE70GxywnjpSqoTaBLYbYGAbUILcv8IKAoo-fJKI,27047 +resources/openstack/sharedservices/glance.png,sha256=duyazAYgkBFq7wcks2i6zaOfm2tLqfgqRyk-Sm5FjUA,29533 +resources/openstack/sharedservices/karbor.png,sha256=8qh4FvLhOT52Vz4--aHc2jEcr9d56HQcaRELJOnlyVQ,15671 +resources/openstack/sharedservices/keystone.png,sha256=tvK_aIzPMYRGx4pc55DxImeLulux2mMse6agT75Gvm4,24374 +resources/openstack/sharedservices/searchlight.png,sha256=UXBPaphx0p12oQldC2NEUHypm-Za2npwFCcGtxSKVk0,18453 +resources/openstack/storage/cinder.png,sha256=ovdRvDMK0pGbjblFaQ0v_bhilKlVI8rsXksjpnCli8g,34645 +resources/openstack/storage/manila.png,sha256=HM2jffnpf-1VKLBI8HNnK6Yfk-jovyQqNKzo3v1v3N8,23298 +resources/openstack/storage/swift.png,sha256=Ozf8dGihTXc4KFIm8xeA-LYuXSU-aZgdMmFuCgSYBuY,20583 +resources/openstack/user/openstackclient.png,sha256=CPRbypx1oIdmXaxosN4Z3Kbb3Mrzo5eyeSbCyeMxtrE,23651 +resources/openstack/workloadprovisioning/magnum.png,sha256=FgyD9uztn_tuwUouNU1O29SHihukPWryAFmw7i2fBMg,22510 +resources/openstack/workloadprovisioning/sahara.png,sha256=TAPl4TYIcyuBnJhy8obgnUzwsfv6or9i1R-Ra2bv9UM,26040 +resources/openstack/workloadprovisioning/trove.png,sha256=J5vuF7r01JPUsV1WmmkgzeJ_j4PKc55ajLucYoPW2qc,20727 +resources/outscale/compute/compute.png,sha256=Tg2Ul2f5z1FHZztqRK7XCrLraydVg_ljuCdmjVLK9dw,14026 +resources/outscale/compute/direct-connect.png,sha256=uQLjahk9LCxxKn4GHHVg2oXchRTWsQv3N9J0JE1nAUQ,21576 +resources/outscale/network/client-vpn.png,sha256=KoqJNe8x8oKJVctvwfvpVU0xBBeC_AkyU2mJoTJbpcg,13651 +resources/outscale/network/internet-service.png,sha256=yGV2qp6yxwbx4vE0iziwr7oVccmkcr1AvgIHHPzGMww,13720 
+resources/outscale/network/load-balancer.png,sha256=8M_Br8XYyPqqjxU2yA6z_OsVZ51SH53xFaH2N-6ukJ8,12779 +resources/outscale/network/nat-service.png,sha256=u18vDLjIt_M7budePEVj8y7WtVnV1BxEIiEZeWlTPQA,9455 +resources/outscale/network/net.png,sha256=VUiBPNJU0j7qdZkWjCgS_-Ki0gl6uwtKnE-20Z_vPN4,12217 +resources/outscale/network/site-to-site-vpng.png,sha256=zkEOeJmVDTP1ualPSnb_fv01vH2Gjeq-91oi6XHbKw0,9653 +resources/outscale/outscale.png,sha256=ll_IwoIIG8o8a0hVKHGkn7O409AMI-QKDjDoUwChm4A,12525 +resources/outscale/security/firewall.png,sha256=urso1GcH5JN8OqskRMXmDoLGzlCKWiEhe5GUaznbn8g,11219 +resources/outscale/security/identity-and-access-management.png,sha256=5giR4xViKnyMef4nE8sNT8xMURLCRvfKzPBSJvsG7P4,14753 +resources/outscale/storage/simple-storage-service.png,sha256=IYl4OsLxXKsIp9mq4IXztiyJsuS4noSS1c9WcrrAqxU,12943 +resources/outscale/storage/storage.png,sha256=oef9Ilgn7Jd_qx6mdzb3ZtsbLHuZK-PNKB2IW8sZF8M,15726 +resources/programming/flowchart/action.png,sha256=wRNR8rd0tmEOFngYqVN8ThAhoVuDx9HF2tXyjSJZXxQ,826 +resources/programming/flowchart/collate.png,sha256=HDNqSWnI8hqDfWeh17HhTQ_x6Exmt88O0A-8tBLXTK0,3007 +resources/programming/flowchart/database.png,sha256=4XQ8W7jmrNSpOtK_Wi2Y9iLmWsbLESmRrAUXb0a37Dk,12758 +resources/programming/flowchart/decision.png,sha256=1HXyUV9dpvVyep4SWU-1cglC_sQy3rdgU5gWmiqRdmg,13036 +resources/programming/flowchart/delay.png,sha256=1qeTDT0mzj5rFTIZYAyWLiWJGAYNMUkwFv3QxldvOPs,5047 +resources/programming/flowchart/display.png,sha256=2VKwT4ps4hh1ihNkrlZ36Z09savf4L_GSdFB-O1pqKE,8698 +resources/programming/flowchart/document.png,sha256=uCWqu9ZoBvKAogYa3icioYL2Y6FjUw02nAEZojYd-fA,5539 +resources/programming/flowchart/input-output.png,sha256=wGrpx9bysdlMj9oscaKh4-npwMrw3_gyyzO2fIFQ2xI,6198 +resources/programming/flowchart/inspection.png,sha256=OKtMI62TRHJuIVoHHlloHJPSfGvxc72xjgTSXngZMJI,7234 +resources/programming/flowchart/internal-storage.png,sha256=KsxqmPCzP-0ZWUTXatBHWzza3AvwHZmkPvG0HmIZVsk,1436 +resources/programming/flowchart/loop-limit.png,sha256=RtPw08bJvhRcrTHaGh6nFZ0fC_UFZE0kFCcM_Sg-dUA,1726 +resources/programming/flowchart/manual-input.png,sha256=CfLn6xW_fUlAcskxTlSJctTVz70PIFDHRB9eX6O-scU,5308 +resources/programming/flowchart/manual-loop.png,sha256=KMVJascHSQjMhhz3C7asHKmOqHUTv3oM2VDrH1XuQa4,5979 +resources/programming/flowchart/merge.png,sha256=qcZe4P-v1D9s_U5i4I4g8_9s4h2zAoHN0dSNZZIuv3E,1623 +resources/programming/flowchart/multiple-documents.png,sha256=UkhKuc4UGWusJHzbKjuluw9K8AMMdhth2tIv_Kf9yOo,6319 +resources/programming/flowchart/off-page-connector-left.png,sha256=L6Z8G63u4Q202OG70QOaKCT5W1XUpbKOFIBb3VaZzIc,2053 +resources/programming/flowchart/off-page-connector-right.png,sha256=s9o4ZxtvwWHpCB6oHFdm_Vz20O6nNFPM1T89Jzt8ZRk,1905 +resources/programming/flowchart/or.png,sha256=ISE_y-Z6n2fEp_AGlG9BmPMeyqKZYxsVYHsyfK52XpI,7607 +resources/programming/flowchart/predefined-process.png,sha256=c2VU7npIWmTOPTB-zB9FU0W3aqKBqxr1OAoETdxCrQY,1402 +resources/programming/flowchart/preparation.png,sha256=AK0WxLorGq8MzgXtZdv60IMZg54bOP_1fv7e7nDz9oY,7606 +resources/programming/flowchart/sort.png,sha256=mnB7p7pl69YwvVRvBWQKocEqQFw1rSTLX499gAT7NjU,2714 +resources/programming/flowchart/start-end.png,sha256=Ewb0LvmnJf9ER1THgzsurCxI_5VLARh9QECz5clUy0I,4918 +resources/programming/flowchart/stored-data.png,sha256=5O4pUXwHX_C0kVIVECF03-QsDI6eOJ8tKBogDoF7qwo,8135 +resources/programming/flowchart/summing-junction.png,sha256=h5P2UDbKLFxX69f_RIZbfHwz3vK4UgMjKSTvmDnCbUk,8965 +resources/programming/framework/angular.png,sha256=hZSWidZ6Cab-TNVBVgTs2AD31lnGvdjMV6QoOlw7Q1U,18294 
+resources/programming/framework/backbone.png,sha256=slhug3qTfOXB7sJ0LvM1dDzLCIuioLRpBvrcwnhSO7A,23326 +resources/programming/framework/camel.png,sha256=HwPQZYD7hShIPl-7aFcpH_YJeg1Bv2yqR9-OPasmobE,18232 +resources/programming/framework/django.png,sha256=GzUcoW0dNqIvzTvGF8cV1vJeiCVOrcS4l2hffRxws8k,10890 +resources/programming/framework/dotnet.png,sha256=iLV6uavdJatYbvyT4oKubcwxZ5d33e51khyanXe9VZw,15047 +resources/programming/framework/ember.png,sha256=lXvzcn7Ny7LpxeQdGo2OzymLcN9N-2DxlaaQ2-dyu-w,41332 +resources/programming/framework/fastapi.png,sha256=dZHBBtCtzXhQb7wWfMtQI8RGKe2U5632PMOifXiPiRQ,10779 +resources/programming/framework/flask.png,sha256=8J70dbEtQAsnd9IUBffbily4H1Pgac30d5HGPAzHHTk,23130 +resources/programming/framework/flutter.png,sha256=i5-7eyViz_41NvxxihaXC0RPx_m0NqQmry4-0Mpu3Ak,6468 +resources/programming/framework/graphql.png,sha256=5-qRh7bkculfjVmib4Zf6OpWNR0picGgmb-jFtNXemU,15383 +resources/programming/framework/hibernate.png,sha256=CTd0f6EveLiZB8g5Qy0TizJCw2iJwiAUqiCTQ2wm8rI,12433 +resources/programming/framework/jhipster.png,sha256=kHlkX7tVEyfmHCriSjQ4AfhuzhJ0UyP7Av83JRSn7gQ,6888 +resources/programming/framework/laravel.png,sha256=Ppv5h8R94eSqdUj-jxROQXq1PWou11Vk-hOr7gJ1kp0,18845 +resources/programming/framework/micronaut.png,sha256=r5Khnmpg2GqQ2JJSl9F_uKekAzcyU0cGG4JXvcbPAr8,27715 +resources/programming/framework/nextjs.png,sha256=XzaiGo-skxnEisLVJFWhrmPyod-Q9Nj_92DO2K-APZ0,12644 +resources/programming/framework/phoenix.png,sha256=Ebn1VM44ynIlkVoA4ALrf9a1eD6g45zPT8xIufHuAK8,17581 +resources/programming/framework/quarkus.png,sha256=3vPMuLXjT8d2GFJ89A6IZOXFuEIrVxw3slrGtB4Vxmw,5735 +resources/programming/framework/rails.png,sha256=s--6qrx5I_9aIRGDx_3cbGNUrwP-1VZK-6mRj-DLBaY,15168 +resources/programming/framework/react.png,sha256=G3vHKr9XlewYqP6nK2oj96nghsp5eQg8_f9j2rdZp-k,19786 +resources/programming/framework/spring.png,sha256=ufj9npqudndzjver1KBKHJMmFJh8ir0VUL2olJ0eFG8,15864 +resources/programming/framework/sqlpage.png,sha256=sNenVRg2wQm4ZY4yVk1o0KcSELyQ88oY-3nzJUFFmUk,35266 +resources/programming/framework/starlette.png,sha256=utHo3778B-LAiy_gq5SoMZwg8wrN_UkHcgurKQ_wono,10626 +resources/programming/framework/svelte.png,sha256=IP9OCbAyoAGELa67jQNwTkselrdmRS1dUaGZY1tIsnc,17794 +resources/programming/framework/vercel.png,sha256=WGzfXJOdDZn8Hc7stnIGsI4Le-whT3MtJNg4FNLuMvk,3112 +resources/programming/framework/vue.png,sha256=P3iBz3DBMOonpQ1YjO8Bp1er3FRaWUF9dSXSRCokN-o,6610 +resources/programming/language/bash.png,sha256=C--riMVArUgXFpBPw7vvWijjVPaA_e1KpdZFrRguGiE,10745 +resources/programming/language/c.png,sha256=YczBr_5eI9oZTRl2xR7MDFA6wcSXz3oJrz68oyxKNWM,17845 +resources/programming/language/cpp.png,sha256=HsxDGrrJvugiDArCIHnEszIXQpN54gq6KGxikYK3gy0,17290 +resources/programming/language/csharp.png,sha256=462YIpKB95U4Q_e9P04THM_5cRypI60r3_7GxWSQfzs,30872 +resources/programming/language/dart.png,sha256=cs_33HIe6HbcROLcX5F-R9euWQT9ONRHBJSaC3zy0ok,16913 +resources/programming/language/elixir.png,sha256=YZWfujyuGl-nEK8WmwfxKYfodEe6uzTP9dwnQGy4Hkc,40530 +resources/programming/language/erlang.png,sha256=oq1-3H7pLz769yXVmWAmr_mVIowS9cjK7qn4mpAbvdc,10277 +resources/programming/language/go.png,sha256=xKjlcalka0J8sO7WOjEGzYVanG81QI_LLoNMDWruPtE,20418 +resources/programming/language/java.png,sha256=8cK-odRFCI-aOpReqK31cVIcOxJhzVJA0rO9ZDoZt5E,15246 +resources/programming/language/javascript.png,sha256=v-kjDEys_lflwdaU3iwy6maLmjnyi7220LabeDkiR98,22755 +resources/programming/language/kotlin.png,sha256=ZquzkCBvA6jR_IiwXIwEHGK0ab-Y9Eh5veJohUq9upw,10881 
+resources/programming/language/latex.png,sha256=C_Q35Bmk0LJ9qiF7hk95OjT6CiGGPlkpJGQRvAuNyNg,4138 +resources/programming/language/matlab.png,sha256=TDzQYKwgiiGJkwpzo1j5hcSO8AAUtVVEChpaMS4vepk,26491 +resources/programming/language/nodejs.png,sha256=U6kud3GbehtHoesbNRb-fkQQN2cTdG6GRgo4MoLRQyU,10520 +resources/programming/language/php.png,sha256=QsV4ORDdr5-wn6-tIjaiK8N2QgSAPxOSloMyizjZrYA,69258 +resources/programming/language/python.png,sha256=hCy0_BzvNBloDrsmJWNZ4kM7Xys2a8o3BuG73Jph7aI,29539 +resources/programming/language/r.png,sha256=quHTZU6VspZpzhKo5Y_znI0wLunFiPEPDU9OzdQY0vM,16909 +resources/programming/language/ruby.png,sha256=7OELyeEYCIBZS-iPdRQ00AiaydBv4D7mBumPtunNYW0,175525 +resources/programming/language/rust.png,sha256=iMkpFd3o-4GIL_7k-lXcmKYtx7_PwuSWVaneWmbS0HM,9211 +resources/programming/language/scala.png,sha256=Mpct7aZLbBust6p-IkGyr4IpEsA-QwvIdYo9equDZ64,4830 +resources/programming/language/sql.png,sha256=RPmW_u8A690DNjEjoPk3dZooExqkMgo_Tzl_EM9KqfQ,55186 +resources/programming/language/swift.png,sha256=KaVHsgrPY4gjvlliob8RVRTqfPwUSTLXWh4JV0_2Sdc,14741 +resources/programming/language/typescript.png,sha256=nMhPebPaPSEvyiC7RSsoZXLprd11wm166uwpYJLjuSo,8451 +resources/programming/programming.png,sha256=eVA2vB-mTqHbTBlO1af_raMR5ID03PSMm9NNdjN-h34,6877 +resources/programming/runtime/dapr.png,sha256=EK1fRDdLfRTxv3CcHtUdtoEU0iUs0LIUgPWumcRph60,9771 +resources/saas/alerting/newrelic.png,sha256=chIF57ErofzGWzmKw20gZFKTJI5OGIZKT0TVQ9MSOfU,25708 +resources/saas/alerting/opsgenie.png,sha256=pdvGiY1Xxw7t8vtbXhb3iWoakBV63Gtw2ZIKjejae9Y,24531 +resources/saas/alerting/pagerduty.png,sha256=MZDjFOdBOrxdOG5vAhMJ07uz168K_Xr_QYTuwBoOwTI,2192 +resources/saas/alerting/pushover.png,sha256=JHmVYJJWlTjjCR8WzGAs0gys5VXiskgMJAsratYBgjY,47742 +resources/saas/alerting/xmatters.png,sha256=nlOnEOaKVGWCGrL7TsmJkBiP8RAursXkDXRTyM9LVrM,37517 +resources/saas/analytics/dataform.png,sha256=DgMRzg0Z4QXNMYOCgTMZU7tawg3YAPA3YerXcvMCt04,6952 +resources/saas/analytics/snowflake.png,sha256=P3BnKX4qJ4JN712edneenrx_Gx4efb0wy5-80ddLbb0,16205 +resources/saas/analytics/stitch.png,sha256=r105GpoTPPuC2pr14SIjZtCwHHU1bPS2h-qtgHWGxE8,4507 +resources/saas/automation/n8n.png,sha256=qqgbtFxMIwHogxLovFX6q6D5H3rmZh3dzkq-Y-nJfyg,14887 +resources/saas/cdn/akamai.png,sha256=ohRYXKtKEdwUMjuJ7wG-Iitth8iEqaar5AstuWp2wvs,8419 +resources/saas/cdn/cloudflare.png,sha256=UZJ2grLwRMJhV3qzC5Q2KKilPRnM0UPPoZOLPxN91p0,11616 +resources/saas/cdn/fastly.png,sha256=6t7-LdMd0YWPnCFgKzYTM56kvcdrHYwmWiWp94Ecn5A,7028 +resources/saas/chat/discord.png,sha256=MDBMToJhtD5NCOC0TtlKi9Ayk8muw9YfhSRaOMqxHj4,1559 +resources/saas/chat/line.png,sha256=mF89C134wxstrjysKuIZIwbedstwQflCwVhCSBPYwtw,10342 +resources/saas/chat/mattermost.png,sha256=JytiDqg0IUvf5VX1gyWXy8elc51qdmery-RshvHlx-8,7611 +resources/saas/chat/messenger.png,sha256=Z6aOf353u2-CgVtPBikp98HyklZyby0Dbu2Ld6i8FVs,46721 +resources/saas/chat/rocket-chat.png,sha256=8-HmrloaEixjC8BYr1KIufIT2E0_aK7aTQqowY8Z6iE,18207 +resources/saas/chat/slack.png,sha256=FmmU4XQ7XOMxYLzuor4ihQAMR67d-B5WarLmhIhMj5s,17813 +resources/saas/chat/teams.png,sha256=bZAVKMry_UrQGA77CCsssc29QhEmyKi59q5pxvu2ang,6982 +resources/saas/chat/telegram.png,sha256=Bqypycyz3LOWJV7ZiWlgtCObmh0ISlfVwYLpCw6mCek,9324 +resources/saas/communication/twilio.png,sha256=4cMsFQu1TT92fG1zwU3jc9UJCiic-8VSHxCbl6RFpRE,29279 +resources/saas/crm/intercom.png,sha256=oJ66lDqfQpHK7W2ChDTqphiRWsuc4MRFOrdR27ar1cI,8108 +resources/saas/crm/zendesk.png,sha256=waBpCNa1b9W1sw_2kj4UTRtiJF-XhYJWPcuZqGqXnJ8,8090 
+resources/saas/filesharing/nextcloud.png,sha256=DeP2TwPepk72yb3_aKB4p9XLprH_vFoybdShQBnk5-g,11592 +resources/saas/identity/auth0.png,sha256=FKJGzMIpvPgaEti_DxKAkRcDf6P4DtjvhlnS3NNEwAg,11415 +resources/saas/identity/okta.png,sha256=gLJ64oICzXIT7JVQeJKKw4-TP4BMsGtSeh3znNWC-aQ,9199 +resources/saas/logging/datadog.png,sha256=9wiSRPOc9efYKNMXqh9cvwoFqTnssP1wplw7UvPWPbk,43848 +resources/saas/logging/newrelic.png,sha256=6SI76f08anrvIVegR3c7GJLUT2kXs0IkWpNlnvB_LzA,8180 +resources/saas/logging/papertrail.png,sha256=vp-uNXjG-7xoDPSJ_ZUvyZxF1VQnjpCBGQqYkp1Oabs,7004 +resources/saas/media/cloudinary.png,sha256=AmQ8B7o6dDAHSD4ruwf9cdkRHXrdNmVSb05ZdHopFt0,19809 +resources/saas/recommendation/recombee.png,sha256=a9bkNmHROyFvvjIz7VQCidA5KxojPASE9ae2Gd1NtGs,13613 +resources/saas/saas.png,sha256=Vbqws9lww75B09I4sY6q2zdmU0pG5vtn1hMVgmGNFwg,7188 +resources/saas/security/crowdstrike.png,sha256=7ZFy9mWf68NctXiYOavpl3lBv6unxLkhrJNTP7TRkNc,16001 +resources/saas/security/sonarqube.png,sha256=oyQw8qcMJgVCQI1p_iOTkPoTvyJKtYKpkSHJij4d3z4,10598 +resources/saas/social/facebook.png,sha256=LDd789pwMzvIkvdipg3O10dTUzIlfy1V1oesZkKzKXU,14991 +resources/saas/social/twitter.png,sha256=VuLOzcbjRGW30Mrris_yUY97AizFiiJRO3xLAp-MPKQ,10275 diff --git a/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/REQUESTED b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/WHEEL b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/WHEEL new file mode 100644 index 00000000..8b9b3a1b --- /dev/null +++ b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.9.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/entry_points.txt b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/entry_points.txt new file mode 100644 index 00000000..2121c573 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams-0.24.4.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +diagrams=diagrams.cli:main + diff --git a/.venv/Lib/site-packages/diagrams/__init__.py b/.venv/Lib/site-packages/diagrams/__init__.py new file mode 100644 index 00000000..66c3458f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/__init__.py @@ -0,0 +1,571 @@ +import contextvars +import os +import uuid +from pathlib import Path +from typing import Dict, List, Optional, Union + +from graphviz import Digraph + +# Global contexts for a diagrams and a cluster. +# +# These global contexts are for letting the clusters and nodes know +# where context they are belong to. So the all clusters and nodes does +# not need to specify the current diagrams or cluster via parameters. 
+__diagram = contextvars.ContextVar("diagrams") +__cluster = contextvars.ContextVar("cluster") + + +def getdiagram() -> "Diagram": + try: + return __diagram.get() + except LookupError: + return None + + +def setdiagram(diagram: "Diagram"): + __diagram.set(diagram) + + +def getcluster() -> "Cluster": + try: + return __cluster.get() + except LookupError: + return None + + +def setcluster(cluster: "Cluster"): + __cluster.set(cluster) + + +class Diagram: + __directions = ("TB", "BT", "LR", "RL") + __curvestyles = ("ortho", "curved") + __outformats = ("png", "jpg", "svg", "pdf", "dot") + + # fmt: off + _default_graph_attrs = { + "pad": "2.0", + "splines": "ortho", + "nodesep": "0.60", + "ranksep": "0.75", + "fontname": "Sans-Serif", + "fontsize": "15", + "fontcolor": "#2D3436", + } + _default_node_attrs = { + "shape": "box", + "style": "rounded", + "fixedsize": "true", + "width": "1.4", + "height": "1.4", + "labelloc": "b", + # imagepos attribute is not backward compatible + # TODO: check graphviz version to see if "imagepos" is available >= 2.40 + # https://github.com/xflr6/graphviz/blob/master/graphviz/backend.py#L248 + # "imagepos": "tc", + "imagescale": "true", + "fontname": "Sans-Serif", + "fontsize": "13", + "fontcolor": "#2D3436", + } + _default_edge_attrs = { + "color": "#7B8894", + } + + # fmt: on + + # TODO: Label position option + # TODO: Save directory option (filename + directory?) + def __init__( + self, + name: str = "", + filename: str = "", + direction: str = "LR", + curvestyle: str = "ortho", + outformat: Union[str, list[str]] = "png", + autolabel: bool = False, + show: bool = True, + strict: bool = False, + graph_attr: Optional[dict] = None, + node_attr: Optional[dict] = None, + edge_attr: Optional[dict] = None, + ): + """Diagram represents a global diagrams context. + + :param name: Diagram name. It will be used for output filename if the + filename isn't given. + :param filename: The output filename, without the extension (.png). + If not given, it will be generated from the name. + :param direction: Data flow direction. Default is 'left to right'. + :param curvestyle: Curve bending style. One of "ortho" or "curved". + :param outformat: Output file format. Default is 'png'. + :param show: Open generated image after save if true, just only save otherwise. + :param graph_attr: Provide graph_attr dot config attributes. + :param node_attr: Provide node_attr dot config attributes. + :param edge_attr: Provide edge_attr dot config attributes. + :param strict: Rendering should merge multi-edges. + """ + if graph_attr is None: + graph_attr = {} + if node_attr is None: + node_attr = {} + if edge_attr is None: + edge_attr = {} + self.name = name + if not name and not filename: + filename = "diagrams_image" + elif not filename: + filename = "_".join(self.name.split()).lower() + self.filename = filename + self.dot = Digraph(self.name, filename=self.filename, strict=strict) + + # Set attributes. 
+ for k, v in self._default_graph_attrs.items(): + self.dot.graph_attr[k] = v + self.dot.graph_attr["label"] = self.name + for k, v in self._default_node_attrs.items(): + self.dot.node_attr[k] = v + for k, v in self._default_edge_attrs.items(): + self.dot.edge_attr[k] = v + + if not self._validate_direction(direction): + raise ValueError(f'"{direction}" is not a valid direction') + self.dot.graph_attr["rankdir"] = direction + + if not self._validate_curvestyle(curvestyle): + raise ValueError(f'"{curvestyle}" is not a valid curvestyle') + self.dot.graph_attr["splines"] = curvestyle + + if isinstance(outformat, list): + for one_format in outformat: + if not self._validate_outformat(one_format): + raise ValueError( + f'"{one_format}" is not a valid output format') + else: + if not self._validate_outformat(outformat): + raise ValueError(f'"{outformat}" is not a valid output format') + self.outformat = outformat + + # Merge passed in attributes + self.dot.graph_attr.update(graph_attr) + self.dot.node_attr.update(node_attr) + self.dot.edge_attr.update(edge_attr) + + self.show = show + self.autolabel = autolabel + + def __str__(self) -> str: + return str(self.dot) + + def __enter__(self): + setdiagram(self) + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.render() + # Remove the graphviz file leaving only the image. + os.remove(self.filename) + setdiagram(None) + + def _repr_png_(self): + return self.dot.pipe(format="png") + + def _validate_direction(self, direction: str) -> bool: + return direction.upper() in self.__directions + + def _validate_curvestyle(self, curvestyle: str) -> bool: + return curvestyle.lower() in self.__curvestyles + + def _validate_outformat(self, outformat: str) -> bool: + return outformat.lower() in self.__outformats + + def node(self, nodeid: str, label: str, **attrs) -> None: + """Create a new node.""" + self.dot.node(nodeid, label=label, **attrs) + + def connect(self, node: "Node", node2: "Node", edge: "Edge") -> None: + """Connect the two Nodes.""" + self.dot.edge(node.nodeid, node2.nodeid, **edge.attrs) + + def subgraph(self, dot: Digraph) -> None: + """Create a subgraph for clustering""" + self.dot.subgraph(dot) + + def render(self) -> None: + if isinstance(self.outformat, list): + for one_format in self.outformat: + self.dot.render(format=one_format, view=self.show, quiet=True) + else: + self.dot.render(format=self.outformat, view=self.show, quiet=True) + + +class Cluster: + __directions = ("TB", "BT", "LR", "RL") + __bgcolors = ("#E5F5FD", "#EBF3E7", "#ECE8F6", "#FDF7E3") + + # fmt: off + _default_graph_attrs = { + "shape": "box", + "style": "rounded", + "labeljust": "l", + "pencolor": "#AEB6BE", + "fontname": "Sans-Serif", + "fontsize": "12", + } + + # fmt: on + + # FIXME: + # Cluster direction does not work now. Graphviz couldn't render + # correctly for a subgraph that has a different rank direction. + def __init__( + self, + label: str = "cluster", + direction: str = "LR", + graph_attr: Optional[dict] = None, + ): + """Cluster represents a cluster context. + + :param label: Cluster label. + :param direction: Data flow direction. Default is 'left to right'. + :param graph_attr: Provide graph_attr dot config attributes. + """ + if graph_attr is None: + graph_attr = {} + self.label = label + self.name = "cluster_" + self.label + + self.dot = Digraph(self.name) + + # Set attributes. 
+ for k, v in self._default_graph_attrs.items(): + self.dot.graph_attr[k] = v + self.dot.graph_attr["label"] = self.label + + if not self._validate_direction(direction): + raise ValueError(f'"{direction}" is not a valid direction') + self.dot.graph_attr["rankdir"] = direction + + # Node must be belong to a diagrams. + self._diagram = getdiagram() + if self._diagram is None: + raise EnvironmentError("Global diagrams context not set up") + self._parent = getcluster() + + # Set cluster depth for distinguishing the background color + self.depth = self._parent.depth + 1 if self._parent else 0 + coloridx = self.depth % len(self.__bgcolors) + self.dot.graph_attr["bgcolor"] = self.__bgcolors[coloridx] + + # Merge passed in attributes + self.dot.graph_attr.update(graph_attr) + + def __enter__(self): + setcluster(self) + return self + + def __exit__(self, exc_type, exc_value, traceback): + if self._parent: + self._parent.subgraph(self.dot) + else: + self._diagram.subgraph(self.dot) + setcluster(self._parent) + + def _validate_direction(self, direction: str) -> bool: + return direction.upper() in self.__directions + + def node(self, nodeid: str, label: str, **attrs) -> None: + """Create a new node in the cluster.""" + self.dot.node(nodeid, label=label, **attrs) + + def subgraph(self, dot: Digraph) -> None: + self.dot.subgraph(dot) + + +class Node: + """Node represents a node for a specific backend service.""" + + _provider = None + _type = None + + _icon_dir = None + _icon = None + + _height = 1.9 + + def __init__(self, label: str = "", *, nodeid: str = None, **attrs: Dict): + """Node represents a system component. + + :param label: Node label. + """ + # Generates an ID for identifying a node, unless specified + self._id = nodeid or self._rand_id() + self.label = label + + # Node must be belong to a diagrams. + self._diagram = getdiagram() + if self._diagram is None: + raise EnvironmentError("Global diagrams context not set up") + + if self._diagram.autolabel: + prefix = self.__class__.__name__ + if self.label: + self.label = prefix + "\n" + self.label + else: + self.label = prefix + + # fmt: off + # If a node has an icon, increase the height slightly to avoid + # that label being spanned between icon image and white space. + # Increase the height by the number of new lines included in the label. + padding = 0.4 * (self.label.count('\n')) + self._attrs = { + "shape": "none", + "height": str(self._height + padding), + "image": self._load_icon(), + } if self._icon else {} + + # fmt: on + self._attrs.update(attrs) + + self._cluster = getcluster() + + # If a node is in the cluster context, add it to cluster. 
+ if self._cluster: + self._cluster.node(self._id, self.label, **self._attrs) + else: + self._diagram.node(self._id, self.label, **self._attrs) + + def __repr__(self): + _name = self.__class__.__name__ + return f"<{self._provider}.{self._type}.{_name}>" + + def __sub__(self, other: Union["Node", List["Node"], "Edge"]): + """Implement Self - Node, Self - [Nodes] and Self - Edge.""" + if isinstance(other, list): + for node in other: + self.connect(node, Edge(self)) + return other + elif isinstance(other, Node): + return self.connect(other, Edge(self)) + else: + other.node = self + return other + + def __rsub__(self, other: Union[List["Node"], List["Edge"]]): + """Called for [Nodes] and [Edges] - Self because list don't have __sub__ operators.""" + for o in other: + if isinstance(o, Edge): + o.connect(self) + else: + o.connect(self, Edge(self)) + return self + + def __rshift__(self, other: Union["Node", List["Node"], "Edge"]): + """Implements Self >> Node, Self >> [Nodes] and Self Edge.""" + if isinstance(other, list): + for node in other: + self.connect(node, Edge(self, forward=True)) + return other + elif isinstance(other, Node): + return self.connect(other, Edge(self, forward=True)) + else: + other.forward = True + other.node = self + return other + + def __lshift__(self, other: Union["Node", List["Node"], "Edge"]): + """Implements Self << Node, Self << [Nodes] and Self << Edge.""" + if isinstance(other, list): + for node in other: + self.connect(node, Edge(self, reverse=True)) + return other + elif isinstance(other, Node): + return self.connect(other, Edge(self, reverse=True)) + else: + other.reverse = True + return other.connect(self) + + def __rrshift__(self, other: Union[List["Node"], List["Edge"]]): + """Called for [Nodes] and [Edges] >> Self because list don't have __rshift__ operators.""" + for o in other: + if isinstance(o, Edge): + o.forward = True + o.connect(self) + else: + o.connect(self, Edge(self, forward=True)) + return self + + def __rlshift__(self, other: Union[List["Node"], List["Edge"]]): + """Called for [Nodes] << Self because list of Nodes don't have __lshift__ operators.""" + for o in other: + if isinstance(o, Edge): + o.reverse = True + o.connect(self) + else: + o.connect(self, Edge(self, reverse=True)) + return self + + @property + def nodeid(self): + return self._id + + # TODO: option for adding flow description to the connection edge + def connect(self, node: "Node", edge: "Edge"): + """Connect to other node. + + :param node: Other node instance. + :param edge: Type of the edge. + :return: Connected node. + """ + if not isinstance(node, Node): + ValueError(f"{node} is not a valid Node") + if not isinstance(edge, Edge): + ValueError(f"{edge} is not a valid Edge") + # An edge must be added on the global diagrams, not a cluster. + self._diagram.connect(self, node, edge) + return node + + @staticmethod + def _rand_id(): + return uuid.uuid4().hex + + def _load_icon(self): + basedir = Path(os.path.abspath(os.path.dirname(__file__))) + return os.path.join(basedir.parent, self._icon_dir, self._icon) + + +class Edge: + """Edge represents an edge between two nodes.""" + + _default_edge_attrs = { + "fontcolor": "#2D3436", + "fontname": "Sans-Serif", + "fontsize": "13", + } + + def __init__( + self, + node: "Node" = None, + forward: bool = False, + reverse: bool = False, + label: str = "", + color: str = "", + style: str = "", + **attrs: Dict, + ): + """Edge represents an edge between two nodes. + + :param node: Parent node. + :param forward: Points forward. 
+ :param reverse: Points backward. + :param label: Edge label. + :param color: Edge color. + :param style: Edge style. + :param attrs: Other edge attributes + """ + if node is not None: + assert isinstance(node, Node) + + self.node = node + self.forward = forward + self.reverse = reverse + + self._attrs = {} + + # Set attributes. + for k, v in self._default_edge_attrs.items(): + self._attrs[k] = v + + if label: + # Graphviz complaining about using label for edges, so replace it with xlabel. + # Update: xlabel option causes the misaligned label position: + # https://github.com/mingrammer/diagrams/issues/83 + self._attrs["label"] = label + if color: + self._attrs["color"] = color + if style: + self._attrs["style"] = style + self._attrs.update(attrs) + + def __sub__(self, other: Union["Node", "Edge", List["Node"]]): + """Implement Self - Node or Edge and Self - [Nodes]""" + return self.connect(other) + + def __rsub__(self, other: Union[List["Node"], + List["Edge"]]) -> List["Edge"]: + """Called for [Nodes] or [Edges] - Self because list don't have __sub__ operators.""" + return self.append(other) + + def __rshift__(self, other: Union["Node", "Edge", List["Node"]]): + """Implements Self >> Node or Edge and Self >> [Nodes].""" + self.forward = True + return self.connect(other) + + def __lshift__(self, other: Union["Node", "Edge", List["Node"]]): + """Implements Self << Node or Edge and Self << [Nodes].""" + self.reverse = True + return self.connect(other) + + def __rrshift__(self, + other: Union[List["Node"], + List["Edge"]]) -> List["Edge"]: + """Called for [Nodes] or [Edges] >> Self because list of Edges don't have __rshift__ operators.""" + return self.append(other, forward=True) + + def __rlshift__(self, + other: Union[List["Node"], + List["Edge"]]) -> List["Edge"]: + """Called for [Nodes] or [Edges] << Self because list of Edges don't have __lshift__ operators.""" + return self.append(other, reverse=True) + + def append(self, + other: Union[List["Node"], + List["Edge"]], + forward=None, + reverse=None) -> List["Edge"]: + result = [] + for o in other: + if isinstance(o, Edge): + o.forward = forward if forward else o.forward + o.reverse = reverse if reverse else o.reverse + self._attrs = o.attrs.copy() + result.append(o) + else: + result.append( + Edge( + o, + forward=forward, + reverse=reverse, + **self._attrs)) + return result + + def connect(self, other: Union["Node", "Edge", List["Node"]]): + if isinstance(other, list): + for node in other: + self.node.connect(node, self) + return other + elif isinstance(other, Edge): + self._attrs = other._attrs.copy() + return self + else: + if self.node is not None: + return self.node.connect(other, self) + else: + self.node = other + return self + + @property + def attrs(self) -> Dict: + if self.forward and self.reverse: + direction = "both" + elif self.forward: + direction = "forward" + elif self.reverse: + direction = "back" + else: + direction = "none" + return {**self._attrs, "dir": direction} + + +Group = Cluster diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/__init__.py b/.venv/Lib/site-packages/diagrams/alibabacloud/__init__.py new file mode 100644 index 00000000..7d4fa2b5 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/__init__.py @@ -0,0 +1,16 @@ +""" +AlibabaCloud provides a set of services for Alibaba Cloud provider. 
+""" + +from diagrams import Node + + +class _AlibabaCloud(Node): + _provider = "alibabacloud" + _icon_dir = "resources/alibabacloud" + + fontcolor = "#ffffff" + + +class AlibabaCloud(_AlibabaCloud): + _icon = "alibabacloud.png" diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/analytics.py b/.venv/Lib/site-packages/diagrams/alibabacloud/analytics.py new file mode 100644 index 00000000..b224af29 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/analytics.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AlibabaCloud + + +class _Analytics(_AlibabaCloud): + _type = "analytics" + _icon_dir = "resources/alibabacloud/analytics" + + +class AnalyticDb(_Analytics): + _icon = "analytic-db.png" + + +class ClickHouse(_Analytics): + _icon = "click-house.png" + + +class DataLakeAnalytics(_Analytics): + _icon = "data-lake-analytics.png" + + +class ElaticMapReduce(_Analytics): + _icon = "elatic-map-reduce.png" + + +class OpenSearch(_Analytics): + _icon = "open-search.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/application.py b/.venv/Lib/site-packages/diagrams/alibabacloud/application.py new file mode 100644 index 00000000..8f9da8e9 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/application.py @@ -0,0 +1,72 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AlibabaCloud + + +class _Application(_AlibabaCloud): + _type = "application" + _icon_dir = "resources/alibabacloud/application" + + +class ApiGateway(_Application): + _icon = "api-gateway.png" + + +class BeeBot(_Application): + _icon = "bee-bot.png" + + +class BlockchainAsAService(_Application): + _icon = "blockchain-as-a-service.png" + + +class CloudCallCenter(_Application): + _icon = "cloud-call-center.png" + + +class CodePipeline(_Application): + _icon = "code-pipeline.png" + + +class DirectMail(_Application): + _icon = "direct-mail.png" + + +class LogService(_Application): + _icon = "log-service.png" + + +class MessageNotificationService(_Application): + _icon = "message-notification-service.png" + + +class NodeJsPerformancePlatform(_Application): + _icon = "node-js-performance-platform.png" + + +class OpenSearch(_Application): + _icon = "open-search.png" + + +class PerformanceTestingService(_Application): + _icon = "performance-testing-service.png" + + +class RdCloud(_Application): + _icon = "rd-cloud.png" + + +class SmartConversationAnalysis(_Application): + _icon = "smart-conversation-analysis.png" + + +class Yida(_Application): + _icon = "yida.png" + + +# Aliases + +SLS = LogService +MNS = MessageNotificationService +PTS = PerformanceTestingService +SCA = SmartConversationAnalysis diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/communication.py b/.venv/Lib/site-packages/diagrams/alibabacloud/communication.py new file mode 100644 index 00000000..2906fb17 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/communication.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AlibabaCloud + + +class _Communication(_AlibabaCloud): + _type = "communication" + _icon_dir = "resources/alibabacloud/communication" + + +class DirectMail(_Communication): + _icon = "direct-mail.png" + + +class MobilePush(_Communication): + _icon = "mobile-push.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/compute.py b/.venv/Lib/site-packages/diagrams/alibabacloud/compute.py new file mode 100644 index 00000000..4ac57006 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/compute.py @@ -0,0 +1,83 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AlibabaCloud + + +class _Compute(_AlibabaCloud): + _type = "compute" + _icon_dir = "resources/alibabacloud/compute" + + +class AutoScaling(_Compute): + _icon = "auto-scaling.png" + + +class BatchCompute(_Compute): + _icon = "batch-compute.png" + + +class ContainerRegistry(_Compute): + _icon = "container-registry.png" + + +class ContainerService(_Compute): + _icon = "container-service.png" + + +class ElasticComputeService(_Compute): + _icon = "elastic-compute-service.png" + + +class ElasticContainerInstance(_Compute): + _icon = "elastic-container-instance.png" + + +class ElasticHighPerformanceComputing(_Compute): + _icon = "elastic-high-performance-computing.png" + + +class ElasticSearch(_Compute): + _icon = "elastic-search.png" + + +class FunctionCompute(_Compute): + _icon = "function-compute.png" + + +class OperationOrchestrationService(_Compute): + _icon = "operation-orchestration-service.png" + + +class ResourceOrchestrationService(_Compute): + _icon = "resource-orchestration-service.png" + + +class ServerLoadBalancer(_Compute): + _icon = "server-load-balancer.png" + + +class ServerlessAppEngine(_Compute): + _icon = "serverless-app-engine.png" + + +class SimpleApplicationServer(_Compute): + _icon = "simple-application-server.png" + + +class WebAppService(_Compute): + _icon = "web-app-service.png" + + +# Aliases + +ESS = AutoScaling +ECS = ElasticComputeService +ECI = ElasticContainerInstance +EHPC = ElasticHighPerformanceComputing +FC = FunctionCompute +OOS = OperationOrchestrationService +ROS = ResourceOrchestrationService +SLB = ServerLoadBalancer +SAE = ServerlessAppEngine +SAS = SimpleApplicationServer +WAS = WebAppService diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/database.py b/.venv/Lib/site-packages/diagrams/alibabacloud/database.py new file mode 100644 index 00000000..25eaf40b --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/database.py @@ -0,0 +1,86 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AlibabaCloud + + +class _Database(_AlibabaCloud): + _type = "database" + _icon_dir = "resources/alibabacloud/database" + + +class ApsaradbCassandra(_Database): + _icon = "apsaradb-cassandra.png" + + +class ApsaradbHbase(_Database): + _icon = "apsaradb-hbase.png" + + +class ApsaradbMemcache(_Database): + _icon = "apsaradb-memcache.png" + + +class ApsaradbMongodb(_Database): + _icon = "apsaradb-mongodb.png" + + +class ApsaradbOceanbase(_Database): + _icon = "apsaradb-oceanbase.png" + + +class ApsaradbPolardb(_Database): + _icon = "apsaradb-polardb.png" + + +class ApsaradbPostgresql(_Database): + _icon = "apsaradb-postgresql.png" + + +class ApsaradbPpas(_Database): + _icon = "apsaradb-ppas.png" + + +class ApsaradbRedis(_Database): + _icon = "apsaradb-redis.png" + + +class ApsaradbSqlserver(_Database): + _icon = "apsaradb-sqlserver.png" + + +class DataManagementService(_Database): + _icon = "data-management-service.png" + + +class DataTransmissionService(_Database): + _icon = "data-transmission-service.png" + + +class DatabaseBackupService(_Database): + _icon = "database-backup-service.png" + + +class DisributeRelationalDatabaseService(_Database): + _icon = "disribute-relational-database-service.png" + + +class GraphDatabaseService(_Database): + _icon = "graph-database-service.png" + + +class HybriddbForMysql(_Database): + _icon = "hybriddb-for-mysql.png" + + +class RelationalDatabaseService(_Database): + _icon = "relational-database-service.png" + + +# Aliases + +DMS = DataManagementService +DTS = DataTransmissionService +DBS = DatabaseBackupService +DRDS = DisributeRelationalDatabaseService +GDS = GraphDatabaseService +RDS = RelationalDatabaseService diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/iot.py b/.venv/Lib/site-packages/diagrams/alibabacloud/iot.py new file mode 100644 index 00000000..3df32fa8 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/iot.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AlibabaCloud + + +class _Iot(_AlibabaCloud): + _type = "iot" + _icon_dir = "resources/alibabacloud/iot" + + +class IotInternetDeviceId(_Iot): + _icon = "iot-internet-device-id.png" + + +class IotLinkWan(_Iot): + _icon = "iot-link-wan.png" + + +class IotMobileConnectionPackage(_Iot): + _icon = "iot-mobile-connection-package.png" + + +class IotPlatform(_Iot): + _icon = "iot-platform.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/network.py b/.venv/Lib/site-packages/diagrams/alibabacloud/network.py new file mode 100644 index 00000000..f634d11f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/network.py @@ -0,0 +1,52 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AlibabaCloud + + +class _Network(_AlibabaCloud): + _type = "network" + _icon_dir = "resources/alibabacloud/network" + + +class Cdn(_Network): + _icon = "cdn.png" + + +class CloudEnterpriseNetwork(_Network): + _icon = "cloud-enterprise-network.png" + + +class ElasticIpAddress(_Network): + _icon = "elastic-ip-address.png" + + +class ExpressConnect(_Network): + _icon = "express-connect.png" + + +class NatGateway(_Network): + _icon = "nat-gateway.png" + + +class ServerLoadBalancer(_Network): + _icon = "server-load-balancer.png" + + +class SmartAccessGateway(_Network): + _icon = "smart-access-gateway.png" + + +class VirtualPrivateCloud(_Network): + _icon = "virtual-private-cloud.png" + + +class VpnGateway(_Network): + _icon = "vpn-gateway.png" + + +# Aliases + +CEN = CloudEnterpriseNetwork +EIP = ElasticIpAddress +SLB = ServerLoadBalancer +VPC = VirtualPrivateCloud diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/security.py b/.venv/Lib/site-packages/diagrams/alibabacloud/security.py new file mode 100644 index 00000000..e104ef40 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/security.py @@ -0,0 +1,90 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AlibabaCloud + + +class _Security(_AlibabaCloud): + _type = "security" + _icon_dir = "resources/alibabacloud/security" + + +class AntiBotService(_Security): + _icon = "anti-bot-service.png" + + +class AntiDdosBasic(_Security): + _icon = "anti-ddos-basic.png" + + +class AntiDdosPro(_Security): + _icon = "anti-ddos-pro.png" + + +class AntifraudService(_Security): + _icon = "antifraud-service.png" + + +class BastionHost(_Security): + _icon = "bastion-host.png" + + +class CloudFirewall(_Security): + _icon = "cloud-firewall.png" + + +class CloudSecurityScanner(_Security): + _icon = "cloud-security-scanner.png" + + +class ContentModeration(_Security): + _icon = "content-moderation.png" + + +class CrowdsourcedSecurityTesting(_Security): + _icon = "crowdsourced-security-testing.png" + + +class DataEncryptionService(_Security): + _icon = "data-encryption-service.png" + + +class DbAudit(_Security): + _icon = "db-audit.png" + + +class GameShield(_Security): + _icon = "game-shield.png" + + +class IdVerification(_Security): + _icon = "id-verification.png" + + +class ManagedSecurityService(_Security): + _icon = "managed-security-service.png" + + +class SecurityCenter(_Security): + _icon = "security-center.png" + + +class ServerGuard(_Security): + _icon = "server-guard.png" + + +class SslCertificates(_Security): + _icon = "ssl-certificates.png" + + +class WebApplicationFirewall(_Security): + _icon = "web-application-firewall.png" + + +# Aliases + +ABS = AntiBotService +AS = AntifraudService +CFW = CloudFirewall +CM = ContentModeration +DES = DataEncryptionService +WAF = WebApplicationFirewall diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/storage.py b/.venv/Lib/site-packages/diagrams/alibabacloud/storage.py new file mode 100644 index 00000000..e85c56c3 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/storage.py @@ -0,0 +1,50 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AlibabaCloud + + +class _Storage(_AlibabaCloud): + _type = "storage" + _icon_dir = "resources/alibabacloud/storage" + + +class CloudStorageGateway(_Storage): + _icon = "cloud-storage-gateway.png" + + +class FileStorageHdfs(_Storage): + _icon = "file-storage-hdfs.png" + + +class FileStorageNas(_Storage): + _icon = "file-storage-nas.png" + + +class HybridBackupRecovery(_Storage): + _icon = "hybrid-backup-recovery.png" + + +class HybridCloudDisasterRecovery(_Storage): + _icon = "hybrid-cloud-disaster-recovery.png" + + +class Imm(_Storage): + _icon = "imm.png" + + +class ObjectStorageService(_Storage): + _icon = "object-storage-service.png" + + +class ObjectTableStore(_Storage): + _icon = "object-table-store.png" + + +# Aliases + +HDFS = FileStorageHdfs +NAS = FileStorageNas +HBR = HybridBackupRecovery +HDR = HybridCloudDisasterRecovery +OSS = ObjectStorageService +OTS = ObjectTableStore diff --git a/.venv/Lib/site-packages/diagrams/alibabacloud/web.py b/.venv/Lib/site-packages/diagrams/alibabacloud/web.py new file mode 100644 index 00000000..490bf908 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/alibabacloud/web.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AlibabaCloud + + +class _Web(_AlibabaCloud): + _type = "web" + _icon_dir = "resources/alibabacloud/web" + + +class Dns(_Web): + _icon = "dns.png" + + +class Domain(_Web): + _icon = "domain.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/__init__.py b/.venv/Lib/site-packages/diagrams/aws/__init__.py new file mode 100644 index 00000000..cdf31c3c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/__init__.py @@ -0,0 +1,16 @@ +""" +AWS provides a set of services for Amazon Web Service provider. +""" + +from diagrams import Node + + +class _AWS(Node): + _provider = "aws" + _icon_dir = "resources/aws" + + fontcolor = "#ffffff" + + +class AWS(_AWS): + _icon = "aws.png" diff --git a/.venv/Lib/site-packages/diagrams/aws/analytics.py b/.venv/Lib/site-packages/diagrams/aws/analytics.py new file mode 100644 index 00000000..2b789c4b --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/analytics.py @@ -0,0 +1,129 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Analytics(_AWS): + _type = "analytics" + _icon_dir = "resources/aws/analytics" + + +class AmazonOpensearchService(_Analytics): + _icon = "amazon-opensearch-service.png" + + +class Analytics(_Analytics): + _icon = "analytics.png" + + +class Athena(_Analytics): + _icon = "athena.png" + + +class CloudsearchSearchDocuments(_Analytics): + _icon = "cloudsearch-search-documents.png" + + +class Cloudsearch(_Analytics): + _icon = "cloudsearch.png" + + +class DataLakeResource(_Analytics): + _icon = "data-lake-resource.png" + + +class DataPipeline(_Analytics): + _icon = "data-pipeline.png" + + +class ElasticsearchService(_Analytics): + _icon = "elasticsearch-service.png" + + +class EMRCluster(_Analytics): + _icon = "emr-cluster.png" + + +class EMREngineMaprM3(_Analytics): + _icon = "emr-engine-mapr-m3.png" + + +class EMREngineMaprM5(_Analytics): + _icon = "emr-engine-mapr-m5.png" + + +class EMREngineMaprM7(_Analytics): + _icon = "emr-engine-mapr-m7.png" + + +class EMREngine(_Analytics): + _icon = "emr-engine.png" + + +class EMRHdfsCluster(_Analytics): + _icon = "emr-hdfs-cluster.png" + + +class EMR(_Analytics): + _icon = "emr.png" + + +class GlueCrawlers(_Analytics): + _icon = "glue-crawlers.png" + + +class GlueDataCatalog(_Analytics): + _icon = "glue-data-catalog.png" + + +class Glue(_Analytics): + _icon = "glue.png" + + +class KinesisDataAnalytics(_Analytics): + _icon = "kinesis-data-analytics.png" + + +class KinesisDataFirehose(_Analytics): + _icon = "kinesis-data-firehose.png" + + +class KinesisDataStreams(_Analytics): + _icon = "kinesis-data-streams.png" + + +class KinesisVideoStreams(_Analytics): + _icon = "kinesis-video-streams.png" + + +class Kinesis(_Analytics): + _icon = "kinesis.png" + + +class LakeFormation(_Analytics): + _icon = "lake-formation.png" + + +class ManagedStreamingForKafka(_Analytics): + _icon = "managed-streaming-for-kafka.png" + + +class Quicksight(_Analytics): + _icon = "quicksight.png" + + +class RedshiftDenseComputeNode(_Analytics): + _icon = "redshift-dense-compute-node.png" + + +class RedshiftDenseStorageNode(_Analytics): + _icon = "redshift-dense-storage-node.png" + + +class Redshift(_Analytics): + _icon = "redshift.png" + + +# Aliases + +ES = ElasticsearchService diff --git a/.venv/Lib/site-packages/diagrams/aws/ar.py b/.venv/Lib/site-packages/diagrams/aws/ar.py new file mode 100644 index 00000000..4cd147d7 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/ar.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Ar(_AWS): + _type = "ar" + _icon_dir = "resources/aws/ar" + + +class ArVr(_Ar): + _icon = "ar-vr.png" + + +class Sumerian(_Ar): + _icon = "sumerian.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/blockchain.py b/.venv/Lib/site-packages/diagrams/aws/blockchain.py new file mode 100644 index 00000000..12efebea --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/blockchain.py @@ -0,0 +1,29 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Blockchain(_AWS): + _type = "blockchain" + _icon_dir = "resources/aws/blockchain" + + +class BlockchainResource(_Blockchain): + _icon = "blockchain-resource.png" + + +class Blockchain(_Blockchain): + _icon = "blockchain.png" + + +class ManagedBlockchain(_Blockchain): + _icon = "managed-blockchain.png" + + +class QuantumLedgerDatabaseQldb(_Blockchain): + _icon = "quantum-ledger-database-qldb.png" + + +# Aliases + +QLDB = QuantumLedgerDatabaseQldb diff --git a/.venv/Lib/site-packages/diagrams/aws/business.py b/.venv/Lib/site-packages/diagrams/aws/business.py new file mode 100644 index 00000000..87f8378b --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/business.py @@ -0,0 +1,29 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Business(_AWS): + _type = "business" + _icon_dir = "resources/aws/business" + + +class AlexaForBusiness(_Business): + _icon = "alexa-for-business.png" + + +class BusinessApplications(_Business): + _icon = "business-applications.png" + + +class Chime(_Business): + _icon = "chime.png" + + +class Workmail(_Business): + _icon = "workmail.png" + + +# Aliases + +A4B = AlexaForBusiness diff --git a/.venv/Lib/site-packages/diagrams/aws/compute.py b/.venv/Lib/site-packages/diagrams/aws/compute.py new file mode 100644 index 00000000..14c54403 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/compute.py @@ -0,0 +1,179 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Compute(_AWS): + _type = "compute" + _icon_dir = "resources/aws/compute" + + +class AppRunner(_Compute): + _icon = "app-runner.png" + + +class ApplicationAutoScaling(_Compute): + _icon = "application-auto-scaling.png" + + +class Batch(_Compute): + _icon = "batch.png" + + +class ComputeOptimizer(_Compute): + _icon = "compute-optimizer.png" + + +class Compute(_Compute): + _icon = "compute.png" + + +class EC2Ami(_Compute): + _icon = "ec2-ami.png" + + +class EC2AutoScaling(_Compute): + _icon = "ec2-auto-scaling.png" + + +class EC2ContainerRegistryImage(_Compute): + _icon = "ec2-container-registry-image.png" + + +class EC2ContainerRegistryRegistry(_Compute): + _icon = "ec2-container-registry-registry.png" + + +class EC2ContainerRegistry(_Compute): + _icon = "ec2-container-registry.png" + + +class EC2ElasticIpAddress(_Compute): + _icon = "ec2-elastic-ip-address.png" + + +class EC2ImageBuilder(_Compute): + _icon = "ec2-image-builder.png" + + +class EC2Instance(_Compute): + _icon = "ec2-instance.png" + + +class EC2Instances(_Compute): + _icon = "ec2-instances.png" + + +class EC2Rescue(_Compute): + _icon = "ec2-rescue.png" + + +class EC2SpotInstance(_Compute): + _icon = "ec2-spot-instance.png" + + +class EC2(_Compute): + _icon = "ec2.png" + + +class ElasticBeanstalkApplication(_Compute): + _icon = "elastic-beanstalk-application.png" + + +class ElasticBeanstalkDeployment(_Compute): + _icon = "elastic-beanstalk-deployment.png" + + +class ElasticBeanstalk(_Compute): + _icon = "elastic-beanstalk.png" + + +class ElasticContainerServiceContainer(_Compute): + _icon = "elastic-container-service-container.png" + + +class ElasticContainerServiceService(_Compute): + _icon = "elastic-container-service-service.png" + + +class ElasticContainerService(_Compute): + _icon = "elastic-container-service.png" + + +class ElasticKubernetesService(_Compute): + _icon = "elastic-kubernetes-service.png" + + +class Fargate(_Compute): + _icon = "fargate.png" + + +class LambdaFunction(_Compute): + _icon = 
"lambda-function.png" + + +class Lambda(_Compute): + _icon = "lambda.png" + + +class Lightsail(_Compute): + _icon = "lightsail.png" + + +class LocalZones(_Compute): + _icon = "local-zones.png" + + +class Outposts(_Compute): + _icon = "outposts.png" + + +class ServerlessApplicationRepository(_Compute): + _icon = "serverless-application-repository.png" + + +class ThinkboxDeadline(_Compute): + _icon = "thinkbox-deadline.png" + + +class ThinkboxDraft(_Compute): + _icon = "thinkbox-draft.png" + + +class ThinkboxFrost(_Compute): + _icon = "thinkbox-frost.png" + + +class ThinkboxKrakatoa(_Compute): + _icon = "thinkbox-krakatoa.png" + + +class ThinkboxSequoia(_Compute): + _icon = "thinkbox-sequoia.png" + + +class ThinkboxStoke(_Compute): + _icon = "thinkbox-stoke.png" + + +class ThinkboxXmesh(_Compute): + _icon = "thinkbox-xmesh.png" + + +class VmwareCloudOnAWS(_Compute): + _icon = "vmware-cloud-on-aws.png" + + +class Wavelength(_Compute): + _icon = "wavelength.png" + + +# Aliases + +AutoScaling = ApplicationAutoScaling +AMI = EC2Ami +ECR = EC2ContainerRegistry +EB = ElasticBeanstalk +ECS = ElasticContainerService +EKS = ElasticKubernetesService +SAR = ServerlessApplicationRepository diff --git a/.venv/Lib/site-packages/diagrams/aws/cost.py b/.venv/Lib/site-packages/diagrams/aws/cost.py new file mode 100644 index 00000000..ba601eb7 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/cost.py @@ -0,0 +1,35 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Cost(_AWS): + _type = "cost" + _icon_dir = "resources/aws/cost" + + +class Budgets(_Cost): + _icon = "budgets.png" + + +class CostAndUsageReport(_Cost): + _icon = "cost-and-usage-report.png" + + +class CostExplorer(_Cost): + _icon = "cost-explorer.png" + + +class CostManagement(_Cost): + _icon = "cost-management.png" + + +class ReservedInstanceReporting(_Cost): + _icon = "reserved-instance-reporting.png" + + +class SavingsPlans(_Cost): + _icon = "savings-plans.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/database.py b/.venv/Lib/site-packages/diagrams/aws/database.py new file mode 100644 index 00000000..cbdb34d9 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/database.py @@ -0,0 +1,156 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Database(_AWS): + _type = "database" + _icon_dir = "resources/aws/database" + + +class AuroraInstance(_Database): + _icon = "aurora-instance.png" + + +class Aurora(_Database): + _icon = "aurora.png" + + +class DatabaseMigrationServiceDatabaseMigrationWorkflow(_Database): + _icon = "database-migration-service-database-migration-workflow.png" + + +class DatabaseMigrationService(_Database): + _icon = "database-migration-service.png" + + +class Database(_Database): + _icon = "database.png" + + +class DocumentdbMongodbCompatibility(_Database): + _icon = "documentdb-mongodb-compatibility.png" + + +class DynamodbAttribute(_Database): + _icon = "dynamodb-attribute.png" + + +class DynamodbAttributes(_Database): + _icon = "dynamodb-attributes.png" + + +class DynamodbDax(_Database): + _icon = "dynamodb-dax.png" + + +class DynamodbGlobalSecondaryIndex(_Database): + _icon = "dynamodb-global-secondary-index.png" + + +class DynamodbItem(_Database): + _icon = "dynamodb-item.png" + + +class DynamodbItems(_Database): + _icon = "dynamodb-items.png" + + +class DynamodbStreams(_Database): + _icon = "dynamodb-streams.png" + + +class DynamodbTable(_Database): + _icon = "dynamodb-table.png" + + +class Dynamodb(_Database): + _icon = "dynamodb.png" + + +class ElasticacheCacheNode(_Database): + _icon = "elasticache-cache-node.png" + + +class ElasticacheForMemcached(_Database): + _icon = "elasticache-for-memcached.png" + + +class ElasticacheForRedis(_Database): + _icon = "elasticache-for-redis.png" + + +class Elasticache(_Database): + _icon = "elasticache.png" + + +class KeyspacesManagedApacheCassandraService(_Database): + _icon = "keyspaces-managed-apache-cassandra-service.png" + + +class Neptune(_Database): + _icon = "neptune.png" + + +class QuantumLedgerDatabaseQldb(_Database): + _icon = "quantum-ledger-database-qldb.png" + + +class RDSInstance(_Database): + _icon = "rds-instance.png" + + +class RDSMariadbInstance(_Database): + _icon = "rds-mariadb-instance.png" + + +class RDSMysqlInstance(_Database): + _icon = "rds-mysql-instance.png" + + +class RDSOnVmware(_Database): + _icon = "rds-on-vmware.png" + + +class RDSOracleInstance(_Database): + _icon = "rds-oracle-instance.png" + + +class RDSPostgresqlInstance(_Database): + _icon = "rds-postgresql-instance.png" + + +class RDSSqlServerInstance(_Database): + _icon = "rds-sql-server-instance.png" + + +class RDS(_Database): + _icon = "rds.png" + + +class RedshiftDenseComputeNode(_Database): + _icon = "redshift-dense-compute-node.png" + + +class RedshiftDenseStorageNode(_Database): + _icon = "redshift-dense-storage-node.png" + + +class Redshift(_Database): + _icon = "redshift.png" + + +class Timestream(_Database): + _icon = "timestream.png" + + +# Aliases + +DMS = DatabaseMigrationService +DocumentDB = DocumentdbMongodbCompatibility +DAX = DynamodbDax +DynamodbGSI = DynamodbGlobalSecondaryIndex +DB = Database +DDB = Dynamodb +ElastiCache = Elasticache +QLDB = QuantumLedgerDatabaseQldb diff --git a/.venv/Lib/site-packages/diagrams/aws/devtools.py b/.venv/Lib/site-packages/diagrams/aws/devtools.py new file mode 100644 index 00000000..d6458e5a --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/devtools.py @@ -0,0 +1,66 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Devtools(_AWS): + _type = "devtools" + _icon_dir = "resources/aws/devtools" + + +class CloudDevelopmentKit(_Devtools): + _icon = "cloud-development-kit.png" + + +class Cloud9Resource(_Devtools): + _icon = "cloud9-resource.png" + + +class Cloud9(_Devtools): + _icon = "cloud9.png" + + +class Codeartifact(_Devtools): + _icon = "codeartifact.png" + + +class Codebuild(_Devtools): + _icon = "codebuild.png" + + +class Codecommit(_Devtools): + _icon = "codecommit.png" + + +class Codedeploy(_Devtools): + _icon = "codedeploy.png" + + +class Codepipeline(_Devtools): + _icon = "codepipeline.png" + + +class Codestar(_Devtools): + _icon = "codestar.png" + + +class CommandLineInterface(_Devtools): + _icon = "command-line-interface.png" + + +class DeveloperTools(_Devtools): + _icon = "developer-tools.png" + + +class ToolsAndSdks(_Devtools): + _icon = "tools-and-sdks.png" + + +class XRay(_Devtools): + _icon = "x-ray.png" + + +# Aliases + +CLI = CommandLineInterface +DevTools = DeveloperTools diff --git a/.venv/Lib/site-packages/diagrams/aws/enablement.py b/.venv/Lib/site-packages/diagrams/aws/enablement.py new file mode 100644 index 00000000..ec4afceb --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/enablement.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Enablement(_AWS): + _type = "enablement" + _icon_dir = "resources/aws/enablement" + + +class CustomerEnablement(_Enablement): + _icon = "customer-enablement.png" + + +class Iq(_Enablement): + _icon = "iq.png" + + +class ManagedServices(_Enablement): + _icon = "managed-services.png" + + +class ProfessionalServices(_Enablement): + _icon = "professional-services.png" + + +class Support(_Enablement): + _icon = "support.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/enduser.py b/.venv/Lib/site-packages/diagrams/aws/enduser.py new file mode 100644 index 00000000..7cea8d36 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/enduser.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Enduser(_AWS): + _type = "enduser" + _icon_dir = "resources/aws/enduser" + + +class Appstream20(_Enduser): + _icon = "appstream-2-0.png" + + +class DesktopAndAppStreaming(_Enduser): + _icon = "desktop-and-app-streaming.png" + + +class Workdocs(_Enduser): + _icon = "workdocs.png" + + +class Worklink(_Enduser): + _icon = "worklink.png" + + +class Workspaces(_Enduser): + _icon = "workspaces.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/engagement.py b/.venv/Lib/site-packages/diagrams/aws/engagement.py new file mode 100644 index 00000000..ce965b33 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/engagement.py @@ -0,0 +1,33 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Engagement(_AWS): + _type = "engagement" + _icon_dir = "resources/aws/engagement" + + +class Connect(_Engagement): + _icon = "connect.png" + + +class CustomerEngagement(_Engagement): + _icon = "customer-engagement.png" + + +class Pinpoint(_Engagement): + _icon = "pinpoint.png" + + +class SimpleEmailServiceSesEmail(_Engagement): + _icon = "simple-email-service-ses-email.png" + + +class SimpleEmailServiceSes(_Engagement): + _icon = "simple-email-service-ses.png" + + +# Aliases + +SES = SimpleEmailServiceSes diff --git a/.venv/Lib/site-packages/diagrams/aws/game.py b/.venv/Lib/site-packages/diagrams/aws/game.py new file mode 100644 index 00000000..446904f2 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/game.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Game(_AWS): + _type = "game" + _icon_dir = "resources/aws/game" + + +class GameTech(_Game): + _icon = "game-tech.png" + + +class Gamelift(_Game): + _icon = "gamelift.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/general.py b/.venv/Lib/site-packages/diagrams/aws/general.py new file mode 100644 index 00000000..dd6d4bea --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/general.py @@ -0,0 +1,109 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _General(_AWS): + _type = "general" + _icon_dir = "resources/aws/general" + + +class Client(_General): + _icon = "client.png" + + +class Disk(_General): + _icon = "disk.png" + + +class Forums(_General): + _icon = "forums.png" + + +class General(_General): + _icon = "general.png" + + +class GenericDatabase(_General): + _icon = "generic-database.png" + + +class GenericFirewall(_General): + _icon = "generic-firewall.png" + + +class GenericOfficeBuilding(_General): + _icon = "generic-office-building.png" + + +class GenericSamlToken(_General): + _icon = "generic-saml-token.png" + + +class GenericSDK(_General): + _icon = "generic-sdk.png" + + +class InternetAlt1(_General): + _icon = "internet-alt1.png" + + +class InternetAlt2(_General): + _icon = "internet-alt2.png" + + +class InternetGateway(_General): + _icon = "internet-gateway.png" + + +class Marketplace(_General): + _icon = "marketplace.png" + + +class MobileClient(_General): + _icon = "mobile-client.png" + + +class Multimedia(_General): + _icon = "multimedia.png" + + +class OfficeBuilding(_General): + _icon = "office-building.png" + + +class SamlToken(_General): + _icon = "saml-token.png" + + +class SDK(_General): + _icon = "sdk.png" + + +class SslPadlock(_General): + _icon = "ssl-padlock.png" + + +class TapeStorage(_General): + _icon = "tape-storage.png" + + +class Toolkit(_General): + _icon = "toolkit.png" + + +class TraditionalServer(_General): + _icon = "traditional-server.png" + + +class User(_General): + _icon = "user.png" + + +class Users(_General): + _icon = "users.png" + + +# Aliases + +OfficeBuilding = GenericOfficeBuilding diff --git a/.venv/Lib/site-packages/diagrams/aws/integration.py b/.venv/Lib/site-packages/diagrams/aws/integration.py new file mode 100644 index 00000000..cfe23d48 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/integration.py @@ -0,0 +1,87 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Integration(_AWS): + _type = "integration" + _icon_dir = "resources/aws/integration" + + +class ApplicationIntegration(_Integration): + _icon = "application-integration.png" + + +class Appsync(_Integration): + _icon = "appsync.png" + + +class ConsoleMobileApplication(_Integration): + _icon = "console-mobile-application.png" + + +class EventResource(_Integration): + _icon = "event-resource.png" + + +class EventbridgeCustomEventBusResource(_Integration): + _icon = "eventbridge-custom-event-bus-resource.png" + + +class EventbridgeDefaultEventBusResource(_Integration): + _icon = "eventbridge-default-event-bus-resource.png" + + +class EventbridgeSaasPartnerEventBusResource(_Integration): + _icon = "eventbridge-saas-partner-event-bus-resource.png" + + +class Eventbridge(_Integration): + _icon = "eventbridge.png" + + +class ExpressWorkflows(_Integration): + _icon = "express-workflows.png" + + +class MQ(_Integration): + _icon = "mq.png" + + +class SimpleNotificationServiceSnsEmailNotification(_Integration): + _icon = "simple-notification-service-sns-email-notification.png" + + +class SimpleNotificationServiceSnsHttpNotification(_Integration): + _icon = "simple-notification-service-sns-http-notification.png" + + +class SimpleNotificationServiceSnsTopic(_Integration): + _icon = "simple-notification-service-sns-topic.png" + + +class SimpleNotificationServiceSns(_Integration): + _icon = "simple-notification-service-sns.png" + + +class SimpleQueueServiceSqsMessage(_Integration): + _icon = "simple-queue-service-sqs-message.png" + + +class SimpleQueueServiceSqsQueue(_Integration): + _icon = "simple-queue-service-sqs-queue.png" + + +class SimpleQueueServiceSqs(_Integration): + _icon = "simple-queue-service-sqs.png" + + +class StepFunctions(_Integration): + _icon = "step-functions.png" + + +# Aliases + +SNS = SimpleNotificationServiceSns +SQS = SimpleQueueServiceSqs +SF = StepFunctions diff --git a/.venv/Lib/site-packages/diagrams/aws/iot.py b/.venv/Lib/site-packages/diagrams/aws/iot.py new file mode 100644 index 00000000..94667ef8 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/iot.py @@ -0,0 +1,258 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Iot(_AWS): + _type = "iot" + _icon_dir = "resources/aws/iot" + + +class Freertos(_Iot): + _icon = "freertos.png" + + +class InternetOfThings(_Iot): + _icon = "internet-of-things.png" + + +class Iot1Click(_Iot): + _icon = "iot-1-click.png" + + +class IotAction(_Iot): + _icon = "iot-action.png" + + +class IotActuator(_Iot): + _icon = "iot-actuator.png" + + +class IotAlexaEcho(_Iot): + _icon = "iot-alexa-echo.png" + + +class IotAlexaEnabledDevice(_Iot): + _icon = "iot-alexa-enabled-device.png" + + +class IotAlexaSkill(_Iot): + _icon = "iot-alexa-skill.png" + + +class IotAlexaVoiceService(_Iot): + _icon = "iot-alexa-voice-service.png" + + +class IotAnalyticsChannel(_Iot): + _icon = "iot-analytics-channel.png" + + +class IotAnalyticsDataSet(_Iot): + _icon = "iot-analytics-data-set.png" + + +class IotAnalyticsDataStore(_Iot): + _icon = "iot-analytics-data-store.png" + + +class IotAnalyticsNotebook(_Iot): + _icon = "iot-analytics-notebook.png" + + +class IotAnalyticsPipeline(_Iot): + _icon = "iot-analytics-pipeline.png" + + +class IotAnalytics(_Iot): + _icon = "iot-analytics.png" + + +class IotBank(_Iot): + _icon = "iot-bank.png" + + +class IotBicycle(_Iot): + _icon = "iot-bicycle.png" + + +class IotButton(_Iot): + _icon = "iot-button.png" + + +class IotCamera(_Iot): + _icon = "iot-camera.png" + + +class IotCar(_Iot): + _icon = "iot-car.png" + + +class IotCart(_Iot): + _icon = "iot-cart.png" + + +class IotCertificate(_Iot): + _icon = "iot-certificate.png" + + +class IotCoffeePot(_Iot): + _icon = "iot-coffee-pot.png" + + +class IotCore(_Iot): + _icon = "iot-core.png" + + +class IotDesiredState(_Iot): + _icon = "iot-desired-state.png" + + +class IotDeviceDefender(_Iot): + _icon = "iot-device-defender.png" + + +class IotDeviceGateway(_Iot): + _icon = "iot-device-gateway.png" + + +class IotDeviceManagement(_Iot): + _icon = "iot-device-management.png" + + +class IotDoorLock(_Iot): + _icon = "iot-door-lock.png" + + +class IotEvents(_Iot): + _icon = "iot-events.png" + + +class IotFactory(_Iot): + _icon = "iot-factory.png" + + +class IotFireTvStick(_Iot): + _icon = "iot-fire-tv-stick.png" + + +class IotFireTv(_Iot): + _icon = "iot-fire-tv.png" + + +class IotGeneric(_Iot): + _icon = "iot-generic.png" + + +class IotGreengrassConnector(_Iot): + _icon = "iot-greengrass-connector.png" + + +class IotGreengrass(_Iot): + _icon = "iot-greengrass.png" + + +class IotHardwareBoard(_Iot): + _icon = "iot-hardware-board.png" + + +class IotHouse(_Iot): + _icon = "iot-house.png" + + +class IotHttp(_Iot): + _icon = "iot-http.png" + + +class IotHttp2(_Iot): + _icon = "iot-http2.png" + + +class IotJobs(_Iot): + _icon = "iot-jobs.png" + + +class IotLambda(_Iot): + _icon = "iot-lambda.png" + + +class IotLightbulb(_Iot): + _icon = "iot-lightbulb.png" + + +class IotMedicalEmergency(_Iot): + _icon = "iot-medical-emergency.png" + + +class IotMqtt(_Iot): + _icon = "iot-mqtt.png" + + +class IotOverTheAirUpdate(_Iot): + _icon = "iot-over-the-air-update.png" + + +class IotPolicyEmergency(_Iot): + _icon = "iot-policy-emergency.png" + + +class IotPolicy(_Iot): + _icon = "iot-policy.png" + + +class IotReportedState(_Iot): + _icon = "iot-reported-state.png" + + +class IotRule(_Iot): + _icon = "iot-rule.png" + + +class IotSensor(_Iot): + _icon = "iot-sensor.png" + + +class IotServo(_Iot): + _icon = "iot-servo.png" + + +class IotShadow(_Iot): + _icon = "iot-shadow.png" + + +class IotSimulator(_Iot): + _icon = "iot-simulator.png" + + +class IotSitewise(_Iot): + _icon = "iot-sitewise.png" + + +class 
IotThermostat(_Iot): + _icon = "iot-thermostat.png" + + +class IotThingsGraph(_Iot): + _icon = "iot-things-graph.png" + + +class IotTopic(_Iot): + _icon = "iot-topic.png" + + +class IotTravel(_Iot): + _icon = "iot-travel.png" + + +class IotUtility(_Iot): + _icon = "iot-utility.png" + + +class IotWindfarm(_Iot): + _icon = "iot-windfarm.png" + + +# Aliases + +FreeRTOS = Freertos +IotBoard = IotHardwareBoard diff --git a/.venv/Lib/site-packages/diagrams/aws/management.py b/.venv/Lib/site-packages/diagrams/aws/management.py new file mode 100644 index 00000000..f6710686 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/management.py @@ -0,0 +1,246 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Management(_AWS): + _type = "management" + _icon_dir = "resources/aws/management" + + +class AmazonDevopsGuru(_Management): + _icon = "amazon-devops-guru.png" + + +class AmazonManagedGrafana(_Management): + _icon = "amazon-managed-grafana.png" + + +class AmazonManagedPrometheus(_Management): + _icon = "amazon-managed-prometheus.png" + + +class AmazonManagedWorkflowsApacheAirflow(_Management): + _icon = "amazon-managed-workflows-apache-airflow.png" + + +class AutoScaling(_Management): + _icon = "auto-scaling.png" + + +class Chatbot(_Management): + _icon = "chatbot.png" + + +class CloudformationChangeSet(_Management): + _icon = "cloudformation-change-set.png" + + +class CloudformationStack(_Management): + _icon = "cloudformation-stack.png" + + +class CloudformationTemplate(_Management): + _icon = "cloudformation-template.png" + + +class Cloudformation(_Management): + _icon = "cloudformation.png" + + +class Cloudtrail(_Management): + _icon = "cloudtrail.png" + + +class CloudwatchAlarm(_Management): + _icon = "cloudwatch-alarm.png" + + +class CloudwatchEventEventBased(_Management): + _icon = "cloudwatch-event-event-based.png" + + +class CloudwatchEventTimeBased(_Management): + _icon = "cloudwatch-event-time-based.png" + + +class CloudwatchLogs(_Management): + _icon = "cloudwatch-logs.png" + + +class CloudwatchRule(_Management): + _icon = "cloudwatch-rule.png" + + +class Cloudwatch(_Management): + _icon = "cloudwatch.png" + + +class Codeguru(_Management): + _icon = "codeguru.png" + + +class CommandLineInterface(_Management): + _icon = "command-line-interface.png" + + +class Config(_Management): + _icon = "config.png" + + +class ControlTower(_Management): + _icon = "control-tower.png" + + +class LicenseManager(_Management): + _icon = "license-manager.png" + + +class ManagedServices(_Management): + _icon = "managed-services.png" + + +class ManagementAndGovernance(_Management): + _icon = "management-and-governance.png" + + +class ManagementConsole(_Management): + _icon = "management-console.png" + + +class OpsworksApps(_Management): + _icon = "opsworks-apps.png" + + +class OpsworksDeployments(_Management): + _icon = "opsworks-deployments.png" + + +class OpsworksInstances(_Management): + _icon = "opsworks-instances.png" + + +class OpsworksLayers(_Management): + _icon = "opsworks-layers.png" + + +class OpsworksMonitoring(_Management): + _icon = "opsworks-monitoring.png" + + +class OpsworksPermissions(_Management): + _icon = "opsworks-permissions.png" + + +class OpsworksResources(_Management): + _icon = "opsworks-resources.png" + + +class OpsworksStack(_Management): + _icon = "opsworks-stack.png" + + +class Opsworks(_Management): + _icon = "opsworks.png" + + +class OrganizationsAccount(_Management): + _icon = "organizations-account.png" + + +class 
OrganizationsOrganizationalUnit(_Management): + _icon = "organizations-organizational-unit.png" + + +class Organizations(_Management): + _icon = "organizations.png" + + +class PersonalHealthDashboard(_Management): + _icon = "personal-health-dashboard.png" + + +class Proton(_Management): + _icon = "proton.png" + + +class ServiceCatalog(_Management): + _icon = "service-catalog.png" + + +class SystemsManagerAppConfig(_Management): + _icon = "systems-manager-app-config.png" + + +class SystemsManagerAutomation(_Management): + _icon = "systems-manager-automation.png" + + +class SystemsManagerDocuments(_Management): + _icon = "systems-manager-documents.png" + + +class SystemsManagerInventory(_Management): + _icon = "systems-manager-inventory.png" + + +class SystemsManagerMaintenanceWindows(_Management): + _icon = "systems-manager-maintenance-windows.png" + + +class SystemsManagerOpscenter(_Management): + _icon = "systems-manager-opscenter.png" + + +class SystemsManagerParameterStore(_Management): + _icon = "systems-manager-parameter-store.png" + + +class SystemsManagerPatchManager(_Management): + _icon = "systems-manager-patch-manager.png" + + +class SystemsManagerRunCommand(_Management): + _icon = "systems-manager-run-command.png" + + +class SystemsManagerStateManager(_Management): + _icon = "systems-manager-state-manager.png" + + +class SystemsManager(_Management): + _icon = "systems-manager.png" + + +class TrustedAdvisorChecklistCost(_Management): + _icon = "trusted-advisor-checklist-cost.png" + + +class TrustedAdvisorChecklistFaultTolerant(_Management): + _icon = "trusted-advisor-checklist-fault-tolerant.png" + + +class TrustedAdvisorChecklistPerformance(_Management): + _icon = "trusted-advisor-checklist-performance.png" + + +class TrustedAdvisorChecklistSecurity(_Management): + _icon = "trusted-advisor-checklist-security.png" + + +class TrustedAdvisorChecklist(_Management): + _icon = "trusted-advisor-checklist.png" + + +class TrustedAdvisor(_Management): + _icon = "trusted-advisor.png" + + +class WellArchitectedTool(_Management): + _icon = "well-architected-tool.png" + + +# Aliases + +SSM = SystemsManager +ParameterStore = SystemsManagerParameterStore diff --git a/.venv/Lib/site-packages/diagrams/aws/media.py b/.venv/Lib/site-packages/diagrams/aws/media.py new file mode 100644 index 00000000..52580d57 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/media.py @@ -0,0 +1,63 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Media(_AWS): + _type = "media" + _icon_dir = "resources/aws/media" + + +class ElasticTranscoder(_Media): + _icon = "elastic-transcoder.png" + + +class ElementalConductor(_Media): + _icon = "elemental-conductor.png" + + +class ElementalDelta(_Media): + _icon = "elemental-delta.png" + + +class ElementalLive(_Media): + _icon = "elemental-live.png" + + +class ElementalMediaconnect(_Media): + _icon = "elemental-mediaconnect.png" + + +class ElementalMediaconvert(_Media): + _icon = "elemental-mediaconvert.png" + + +class ElementalMedialive(_Media): + _icon = "elemental-medialive.png" + + +class ElementalMediapackage(_Media): + _icon = "elemental-mediapackage.png" + + +class ElementalMediastore(_Media): + _icon = "elemental-mediastore.png" + + +class ElementalMediatailor(_Media): + _icon = "elemental-mediatailor.png" + + +class ElementalServer(_Media): + _icon = "elemental-server.png" + + +class KinesisVideoStreams(_Media): + _icon = "kinesis-video-streams.png" + + +class MediaServices(_Media): + _icon = "media-services.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/migration.py b/.venv/Lib/site-packages/diagrams/aws/migration.py new file mode 100644 index 00000000..69d029a8 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/migration.py @@ -0,0 +1,65 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Migration(_AWS): + _type = "migration" + _icon_dir = "resources/aws/migration" + + +class ApplicationDiscoveryService(_Migration): + _icon = "application-discovery-service.png" + + +class CloudendureMigration(_Migration): + _icon = "cloudendure-migration.png" + + +class DatabaseMigrationService(_Migration): + _icon = "database-migration-service.png" + + +class DatasyncAgent(_Migration): + _icon = "datasync-agent.png" + + +class Datasync(_Migration): + _icon = "datasync.png" + + +class MigrationAndTransfer(_Migration): + _icon = "migration-and-transfer.png" + + +class MigrationHub(_Migration): + _icon = "migration-hub.png" + + +class ServerMigrationService(_Migration): + _icon = "server-migration-service.png" + + +class SnowballEdge(_Migration): + _icon = "snowball-edge.png" + + +class Snowball(_Migration): + _icon = "snowball.png" + + +class Snowmobile(_Migration): + _icon = "snowmobile.png" + + +class TransferForSftp(_Migration): + _icon = "transfer-for-sftp.png" + + +# Aliases + +ADS = ApplicationDiscoveryService +CEM = CloudendureMigration +DMS = DatabaseMigrationService +MAT = MigrationAndTransfer +SMS = ServerMigrationService diff --git a/.venv/Lib/site-packages/diagrams/aws/ml.py b/.venv/Lib/site-packages/diagrams/aws/ml.py new file mode 100644 index 00000000..95dd716b --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/ml.py @@ -0,0 +1,129 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _ML(_AWS): + _type = "ml" + _icon_dir = "resources/aws/ml" + + +class ApacheMxnetOnAWS(_ML): + _icon = "apache-mxnet-on-aws.png" + + +class AugmentedAi(_ML): + _icon = "augmented-ai.png" + + +class Bedrock(_ML): + _icon = "bedrock.png" + + +class Comprehend(_ML): + _icon = "comprehend.png" + + +class DeepLearningAmis(_ML): + _icon = "deep-learning-amis.png" + + +class DeepLearningContainers(_ML): + _icon = "deep-learning-containers.png" + + +class Deepcomposer(_ML): + _icon = "deepcomposer.png" + + +class Deeplens(_ML): + _icon = "deeplens.png" + + +class Deepracer(_ML): + _icon = "deepracer.png" + + +class ElasticInference(_ML): + _icon = "elastic-inference.png" + + +class Forecast(_ML): + _icon = "forecast.png" + + +class FraudDetector(_ML): + _icon = "fraud-detector.png" + + +class Kendra(_ML): + _icon = "kendra.png" + + +class Lex(_ML): + _icon = "lex.png" + + +class MachineLearning(_ML): + _icon = "machine-learning.png" + + +class Personalize(_ML): + _icon = "personalize.png" + + +class Polly(_ML): + _icon = "polly.png" + + +class RekognitionImage(_ML): + _icon = "rekognition-image.png" + + +class RekognitionVideo(_ML): + _icon = "rekognition-video.png" + + +class Rekognition(_ML): + _icon = "rekognition.png" + + +class SagemakerGroundTruth(_ML): + _icon = "sagemaker-ground-truth.png" + + +class SagemakerModel(_ML): + _icon = "sagemaker-model.png" + + +class SagemakerNotebook(_ML): + _icon = "sagemaker-notebook.png" + + +class SagemakerTrainingJob(_ML): + _icon = "sagemaker-training-job.png" + + +class Sagemaker(_ML): + _icon = "sagemaker.png" + + +class TensorflowOnAWS(_ML): + _icon = "tensorflow-on-aws.png" + + +class Textract(_ML): + _icon = "textract.png" + + +class Transcribe(_ML): + _icon = "transcribe.png" + + +class Translate(_ML): + _icon = "translate.png" + + +# Aliases + +DLC = DeepLearningContainers diff --git a/.venv/Lib/site-packages/diagrams/aws/mobile.py b/.venv/Lib/site-packages/diagrams/aws/mobile.py new file mode 100644 index 00000000..68973033 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/mobile.py @@ -0,0 +1,39 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Mobile(_AWS): + _type = "mobile" + _icon_dir = "resources/aws/mobile" + + +class Amplify(_Mobile): + _icon = "amplify.png" + + +class APIGatewayEndpoint(_Mobile): + _icon = "api-gateway-endpoint.png" + + +class APIGateway(_Mobile): + _icon = "api-gateway.png" + + +class Appsync(_Mobile): + _icon = "appsync.png" + + +class DeviceFarm(_Mobile): + _icon = "device-farm.png" + + +class Mobile(_Mobile): + _icon = "mobile.png" + + +class Pinpoint(_Mobile): + _icon = "pinpoint.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/network.py b/.venv/Lib/site-packages/diagrams/aws/network.py new file mode 100644 index 00000000..4f072756 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/network.py @@ -0,0 +1,181 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Network(_AWS): + _type = "network" + _icon_dir = "resources/aws/network" + + +class APIGatewayEndpoint(_Network): + _icon = "api-gateway-endpoint.png" + + +class APIGateway(_Network): + _icon = "api-gateway.png" + + +class AppMesh(_Network): + _icon = "app-mesh.png" + + +class ClientVpn(_Network): + _icon = "client-vpn.png" + + +class CloudMap(_Network): + _icon = "cloud-map.png" + + +class CloudFrontDownloadDistribution(_Network): + _icon = "cloudfront-download-distribution.png" + + +class CloudFrontEdgeLocation(_Network): + _icon = "cloudfront-edge-location.png" + + +class CloudFrontStreamingDistribution(_Network): + _icon = "cloudfront-streaming-distribution.png" + + +class CloudFront(_Network): + _icon = "cloudfront.png" + + +class DirectConnect(_Network): + _icon = "direct-connect.png" + + +class ElasticLoadBalancing(_Network): + _icon = "elastic-load-balancing.png" + + +class ElbApplicationLoadBalancer(_Network): + _icon = "elb-application-load-balancer.png" + + +class ElbClassicLoadBalancer(_Network): + _icon = "elb-classic-load-balancer.png" + + +class ElbNetworkLoadBalancer(_Network): + _icon = "elb-network-load-balancer.png" + + +class Endpoint(_Network): + _icon = "endpoint.png" + + +class GlobalAccelerator(_Network): + _icon = "global-accelerator.png" + + +class InternetGateway(_Network): + _icon = "internet-gateway.png" + + +class Nacl(_Network): + _icon = "nacl.png" + + +class NATGateway(_Network): + _icon = "nat-gateway.png" + + +class NetworkFirewall(_Network): + _icon = "network-firewall.png" + + +class NetworkingAndContentDelivery(_Network): + _icon = "networking-and-content-delivery.png" + + +class PrivateSubnet(_Network): + _icon = "private-subnet.png" + + +class Privatelink(_Network): + _icon = "privatelink.png" + + +class PublicSubnet(_Network): + _icon = "public-subnet.png" + + +class Route53HostedZone(_Network): + _icon = "route-53-hosted-zone.png" + + +class Route53(_Network): + _icon = "route-53.png" + + +class RouteTable(_Network): + _icon = "route-table.png" + + +class SiteToSiteVpn(_Network): + _icon = "site-to-site-vpn.png" + + +class TransitGatewayAttachment(_Network): + _icon = "transit-gateway-attachment.png" + + +class TransitGateway(_Network): + _icon = "transit-gateway.png" + + +class VPCCustomerGateway(_Network): + _icon = "vpc-customer-gateway.png" + + +class VPCElasticNetworkAdapter(_Network): + _icon = "vpc-elastic-network-adapter.png" + + +class VPCElasticNetworkInterface(_Network): + _icon = "vpc-elastic-network-interface.png" + + +class VPCFlowLogs(_Network): + _icon = "vpc-flow-logs.png" + + +class VPCPeering(_Network): + _icon = "vpc-peering.png" + + +class VPCRouter(_Network): + _icon = "vpc-router.png" + + +class VPCTrafficMirroring(_Network): + _icon = "vpc-traffic-mirroring.png" + + +class VPC(_Network): + _icon = "vpc.png" + + +class VpnConnection(_Network): + _icon = "vpn-connection.png" + + +class VpnGateway(_Network): + _icon = "vpn-gateway.png" + + +# Aliases + +CF = CloudFront +ELB = ElasticLoadBalancing +ALB = ElbApplicationLoadBalancer +CLB = ElbClassicLoadBalancer +NLB = ElbNetworkLoadBalancer +GAX = GlobalAccelerator +IGW = InternetGateway +TGW = TransitGateway +TGWAttach = TransitGatewayAttachment diff --git a/.venv/Lib/site-packages/diagrams/aws/quantum.py b/.venv/Lib/site-packages/diagrams/aws/quantum.py new file mode 100644 index 00000000..9a0b184a --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/quantum.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. 
+ +from . import _AWS + + +class _Quantum(_AWS): + _type = "quantum" + _icon_dir = "resources/aws/quantum" + + +class Braket(_Quantum): + _icon = "braket.png" + + +class QuantumTechnologies(_Quantum): + _icon = "quantum-technologies.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/robotics.py b/.venv/Lib/site-packages/diagrams/aws/robotics.py new file mode 100644 index 00000000..3d53a2ab --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/robotics.py @@ -0,0 +1,35 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Robotics(_AWS): + _type = "robotics" + _icon_dir = "resources/aws/robotics" + + +class RobomakerCloudExtensionRos(_Robotics): + _icon = "robomaker-cloud-extension-ros.png" + + +class RobomakerDevelopmentEnvironment(_Robotics): + _icon = "robomaker-development-environment.png" + + +class RobomakerFleetManagement(_Robotics): + _icon = "robomaker-fleet-management.png" + + +class RobomakerSimulator(_Robotics): + _icon = "robomaker-simulator.png" + + +class Robomaker(_Robotics): + _icon = "robomaker.png" + + +class Robotics(_Robotics): + _icon = "robotics.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/satellite.py b/.venv/Lib/site-packages/diagrams/aws/satellite.py new file mode 100644 index 00000000..16c7472a --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/satellite.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Satellite(_AWS): + _type = "satellite" + _icon_dir = "resources/aws/satellite" + + +class GroundStation(_Satellite): + _icon = "ground-station.png" + + +class Satellite(_Satellite): + _icon = "satellite.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/aws/security.py b/.venv/Lib/site-packages/diagrams/aws/security.py new file mode 100644 index 00000000..72082d11 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/security.py @@ -0,0 +1,179 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _AWS + + +class _Security(_AWS): + _type = "security" + _icon_dir = "resources/aws/security" + + +class AdConnector(_Security): + _icon = "ad-connector.png" + + +class Artifact(_Security): + _icon = "artifact.png" + + +class CertificateAuthority(_Security): + _icon = "certificate-authority.png" + + +class CertificateManager(_Security): + _icon = "certificate-manager.png" + + +class CloudDirectory(_Security): + _icon = "cloud-directory.png" + + +class Cloudhsm(_Security): + _icon = "cloudhsm.png" + + +class Cognito(_Security): + _icon = "cognito.png" + + +class Detective(_Security): + _icon = "detective.png" + + +class DirectoryService(_Security): + _icon = "directory-service.png" + + +class FirewallManager(_Security): + _icon = "firewall-manager.png" + + +class Guardduty(_Security): + _icon = "guardduty.png" + + +class IdentityAndAccessManagementIamAccessAnalyzer(_Security): + _icon = "identity-and-access-management-iam-access-analyzer.png" + + +class IdentityAndAccessManagementIamAddOn(_Security): + _icon = "identity-and-access-management-iam-add-on.png" + + +class IdentityAndAccessManagementIamAWSStsAlternate(_Security): + _icon = "identity-and-access-management-iam-aws-sts-alternate.png" + + +class IdentityAndAccessManagementIamAWSSts(_Security): + _icon = "identity-and-access-management-iam-aws-sts.png" + + +class IdentityAndAccessManagementIamDataEncryptionKey(_Security): + _icon = "identity-and-access-management-iam-data-encryption-key.png" + + +class IdentityAndAccessManagementIamEncryptedData(_Security): + _icon = "identity-and-access-management-iam-encrypted-data.png" + + +class IdentityAndAccessManagementIamLongTermSecurityCredential(_Security): + _icon = "identity-and-access-management-iam-long-term-security-credential.png" + + +class IdentityAndAccessManagementIamMfaToken(_Security): + _icon = "identity-and-access-management-iam-mfa-token.png" + + +class IdentityAndAccessManagementIamPermissions(_Security): + _icon = "identity-and-access-management-iam-permissions.png" + + +class IdentityAndAccessManagementIamRole(_Security): + _icon = "identity-and-access-management-iam-role.png" + + +class IdentityAndAccessManagementIamTemporarySecurityCredential(_Security): + _icon = "identity-and-access-management-iam-temporary-security-credential.png" + + +class IdentityAndAccessManagementIam(_Security): + _icon = "identity-and-access-management-iam.png" + + +class InspectorAgent(_Security): + _icon = "inspector-agent.png" + + +class Inspector(_Security): + _icon = "inspector.png" + + +class KeyManagementService(_Security): + _icon = "key-management-service.png" + + +class Macie(_Security): + _icon = "macie.png" + + +class ManagedMicrosoftAd(_Security): + _icon = "managed-microsoft-ad.png" + + +class ResourceAccessManager(_Security): + _icon = "resource-access-manager.png" + + +class SecretsManager(_Security): + _icon = "secrets-manager.png" + + +class SecurityHubFinding(_Security): + _icon = "security-hub-finding.png" + + +class SecurityHub(_Security): + _icon = "security-hub.png" + + +class SecurityIdentityAndCompliance(_Security): + _icon = "security-identity-and-compliance.png" + + +class ShieldAdvanced(_Security): + _icon = "shield-advanced.png" + + +class Shield(_Security): + _icon = "shield.png" + + +class SimpleAd(_Security): + _icon = "simple-ad.png" + + +class SingleSignOn(_Security): + _icon = "single-sign-on.png" + + +class WAFFilteringRule(_Security): + _icon = "waf-filtering-rule.png" + + +class WAF(_Security): + _icon = "waf.png" + + +# Aliases + +ACM = 
CertificateManager +CloudHSM = Cloudhsm +DS = DirectoryService +FMS = FirewallManager +IAMAccessAnalyzer = IdentityAndAccessManagementIamAccessAnalyzer +IAMAWSSts = IdentityAndAccessManagementIamAWSSts +IAMPermissions = IdentityAndAccessManagementIamPermissions +IAMRole = IdentityAndAccessManagementIamRole +IAM = IdentityAndAccessManagementIam +KMS = KeyManagementService +RAM = ResourceAccessManager diff --git a/.venv/Lib/site-packages/diagrams/aws/storage.py b/.venv/Lib/site-packages/diagrams/aws/storage.py new file mode 100644 index 00000000..a573d1e2 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/aws/storage.py @@ -0,0 +1,141 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _AWS + + +class _Storage(_AWS): + _type = "storage" + _icon_dir = "resources/aws/storage" + + +class Backup(_Storage): + _icon = "backup.png" + + +class CloudendureDisasterRecovery(_Storage): + _icon = "cloudendure-disaster-recovery.png" + + +class EFSInfrequentaccessPrimaryBg(_Storage): + _icon = "efs-infrequentaccess-primary-bg.png" + + +class EFSStandardPrimaryBg(_Storage): + _icon = "efs-standard-primary-bg.png" + + +class ElasticBlockStoreEBSSnapshot(_Storage): + _icon = "elastic-block-store-ebs-snapshot.png" + + +class ElasticBlockStoreEBSVolume(_Storage): + _icon = "elastic-block-store-ebs-volume.png" + + +class ElasticBlockStoreEBS(_Storage): + _icon = "elastic-block-store-ebs.png" + + +class ElasticFileSystemEFSFileSystem(_Storage): + _icon = "elastic-file-system-efs-file-system.png" + + +class ElasticFileSystemEFS(_Storage): + _icon = "elastic-file-system-efs.png" + + +class FsxForLustre(_Storage): + _icon = "fsx-for-lustre.png" + + +class FsxForWindowsFileServer(_Storage): + _icon = "fsx-for-windows-file-server.png" + + +class Fsx(_Storage): + _icon = "fsx.png" + + +class MultipleVolumesResource(_Storage): + _icon = "multiple-volumes-resource.png" + + +class S3AccessPoints(_Storage): + _icon = "s3-access-points.png" + + +class S3GlacierArchive(_Storage): + _icon = "s3-glacier-archive.png" + + +class S3GlacierVault(_Storage): + _icon = "s3-glacier-vault.png" + + +class S3Glacier(_Storage): + _icon = "s3-glacier.png" + + +class S3ObjectLambdaAccessPoints(_Storage): + _icon = "s3-object-lambda-access-points.png" + + +class SimpleStorageServiceS3BucketWithObjects(_Storage): + _icon = "simple-storage-service-s3-bucket-with-objects.png" + + +class SimpleStorageServiceS3Bucket(_Storage): + _icon = "simple-storage-service-s3-bucket.png" + + +class SimpleStorageServiceS3Object(_Storage): + _icon = "simple-storage-service-s3-object.png" + + +class SimpleStorageServiceS3(_Storage): + _icon = "simple-storage-service-s3.png" + + +class SnowFamilySnowballImportExport(_Storage): + _icon = "snow-family-snowball-import-export.png" + + +class SnowballEdge(_Storage): + _icon = "snowball-edge.png" + + +class Snowball(_Storage): + _icon = "snowball.png" + + +class Snowmobile(_Storage): + _icon = "snowmobile.png" + + +class StorageGatewayCachedVolume(_Storage): + _icon = "storage-gateway-cached-volume.png" + + +class StorageGatewayNonCachedVolume(_Storage): + _icon = "storage-gateway-non-cached-volume.png" + + +class StorageGatewayVirtualTapeLibrary(_Storage): + _icon = "storage-gateway-virtual-tape-library.png" + + +class StorageGateway(_Storage): + _icon = "storage-gateway.png" + + +class Storage(_Storage): + _icon = "storage.png" + + +# Aliases + +CDR = CloudendureDisasterRecovery +EBS = ElasticBlockStoreEBS +EFS = ElasticFileSystemEFS +FSx = Fsx +S3 = SimpleStorageServiceS3 
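
The AWS provider modules above are consumed through the `diagrams` DSL: each class is rendered as a Graphviz node, and the `# Aliases` assignments (`ECS`, `ELB`, `S3`, ...) simply bind shorter names to the same classes. A minimal usage sketch follows, assuming the `diagrams` package and Graphviz are installed; the diagram title and node labels are illustrative only.

```python
# Minimal sketch: render a small AWS architecture with node classes
# defined in the modules above. Requires diagrams + Graphviz.
from diagrams import Cluster, Diagram
from diagrams.aws.compute import ECS      # alias for ElasticContainerService (aws/compute.py)
from diagrams.aws.database import RDS     # node class from aws/database.py
from diagrams.aws.network import ELB      # alias for ElasticLoadBalancing (aws/network.py)
from diagrams.aws.storage import S3       # alias for SimpleStorageServiceS3 (aws/storage.py)

with Diagram("Web Service", show=False):  # show=False renders the PNG without opening it
    lb = ELB("lb")
    db = RDS("userdb")

    with Cluster("services"):             # groups nodes in a boxed subgraph
        workers = [ECS("api1"), ECS("api2")]

    lb >> workers >> db                   # >> draws directed edges
    db >> S3("backups")
```
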
diff --git a/.venv/Lib/site-packages/diagrams/azure/__init__.py b/.venv/Lib/site-packages/diagrams/azure/__init__.py new file mode 100644 index 00000000..5f7c2b29 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/__init__.py @@ -0,0 +1,16 @@ +""" +Azure provides a set of services for Microsoft Azure provider. +""" + +from diagrams import Node + + +class _Azure(Node): + _provider = "azure" + _icon_dir = "resources/azure" + + fontcolor = "#ffffff" + + +class Azure(_Azure): + _icon = "azure.png" diff --git a/.venv/Lib/site-packages/diagrams/azure/analytics.py b/.venv/Lib/site-packages/diagrams/azure/analytics.py new file mode 100644 index 00000000..216b3582 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/analytics.py @@ -0,0 +1,59 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Azure + + +class _Analytics(_Azure): + _type = "analytics" + _icon_dir = "resources/azure/analytics" + + +class AnalysisServices(_Analytics): + _icon = "analysis-services.png" + + +class DataExplorerClusters(_Analytics): + _icon = "data-explorer-clusters.png" + + +class DataFactories(_Analytics): + _icon = "data-factories.png" + + +class DataLakeAnalytics(_Analytics): + _icon = "data-lake-analytics.png" + + +class DataLakeStoreGen1(_Analytics): + _icon = "data-lake-store-gen1.png" + + +class Databricks(_Analytics): + _icon = "databricks.png" + + +class EventHubClusters(_Analytics): + _icon = "event-hub-clusters.png" + + +class EventHubs(_Analytics): + _icon = "event-hubs.png" + + +class Hdinsightclusters(_Analytics): + _icon = "hdinsightclusters.png" + + +class LogAnalyticsWorkspaces(_Analytics): + _icon = "log-analytics-workspaces.png" + + +class StreamAnalyticsJobs(_Analytics): + _icon = "stream-analytics-jobs.png" + + +class SynapseAnalytics(_Analytics): + _icon = "synapse-analytics.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/compute.py b/.venv/Lib/site-packages/diagrams/azure/compute.py new file mode 100644 index 00000000..810f9237 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/compute.py @@ -0,0 +1,139 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Azure + + +class _Compute(_Azure): + _type = "compute" + _icon_dir = "resources/azure/compute" + + +class AppServices(_Compute): + _icon = "app-services.png" + + +class AutomanagedVM(_Compute): + _icon = "automanaged-vm.png" + + +class AvailabilitySets(_Compute): + _icon = "availability-sets.png" + + +class BatchAccounts(_Compute): + _icon = "batch-accounts.png" + + +class CitrixVirtualDesktopsEssentials(_Compute): + _icon = "citrix-virtual-desktops-essentials.png" + + +class CloudServicesClassic(_Compute): + _icon = "cloud-services-classic.png" + + +class CloudServices(_Compute): + _icon = "cloud-services.png" + + +class CloudsimpleVirtualMachines(_Compute): + _icon = "cloudsimple-virtual-machines.png" + + +class ContainerApps(_Compute): + _icon = "container-apps.png" + + +class ContainerInstances(_Compute): + _icon = "container-instances.png" + + +class ContainerRegistries(_Compute): + _icon = "container-registries.png" + + +class DiskEncryptionSets(_Compute): + _icon = "disk-encryption-sets.png" + + +class DiskSnapshots(_Compute): + _icon = "disk-snapshots.png" + + +class Disks(_Compute): + _icon = "disks.png" + + +class FunctionApps(_Compute): + _icon = "function-apps.png" + + +class ImageDefinitions(_Compute): + _icon = "image-definitions.png" + + +class ImageVersions(_Compute): + _icon = "image-versions.png" + + +class KubernetesServices(_Compute): + _icon = "kubernetes-services.png" + + +class MeshApplications(_Compute): + _icon = "mesh-applications.png" + + +class OsImages(_Compute): + _icon = "os-images.png" + + +class SAPHANAOnAzure(_Compute): + _icon = "sap-hana-on-azure.png" + + +class ServiceFabricClusters(_Compute): + _icon = "service-fabric-clusters.png" + + +class SharedImageGalleries(_Compute): + _icon = "shared-image-galleries.png" + + +class SpringCloud(_Compute): + _icon = "spring-cloud.png" + + +class VMClassic(_Compute): + _icon = "vm-classic.png" + + +class VMImages(_Compute): + _icon = "vm-images.png" + + +class VMLinux(_Compute): + _icon = "vm-linux.png" + + +class VMScaleSet(_Compute): + _icon = "vm-scale-set.png" + + +class VMWindows(_Compute): + _icon = "vm-windows.png" + + +class VM(_Compute): + _icon = "vm.png" + + +class Workspaces(_Compute): + _icon = "workspaces.png" + + +# Aliases + +ACR = ContainerRegistries +AKS = KubernetesServices +VMSS = VMScaleSet diff --git a/.venv/Lib/site-packages/diagrams/azure/database.py b/.venv/Lib/site-packages/diagrams/azure/database.py new file mode 100644 index 00000000..a9127a85 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/database.py @@ -0,0 +1,107 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Azure + + +class _Database(_Azure): + _type = "database" + _icon_dir = "resources/azure/database" + + +class BlobStorage(_Database): + _icon = "blob-storage.png" + + +class CacheForRedis(_Database): + _icon = "cache-for-redis.png" + + +class CosmosDb(_Database): + _icon = "cosmos-db.png" + + +class DataExplorerClusters(_Database): + _icon = "data-explorer-clusters.png" + + +class DataFactory(_Database): + _icon = "data-factory.png" + + +class DataLake(_Database): + _icon = "data-lake.png" + + +class DatabaseForMariadbServers(_Database): + _icon = "database-for-mariadb-servers.png" + + +class DatabaseForMysqlServers(_Database): + _icon = "database-for-mysql-servers.png" + + +class DatabaseForPostgresqlServers(_Database): + _icon = "database-for-postgresql-servers.png" + + +class ElasticDatabasePools(_Database): + _icon = "elastic-database-pools.png" + + +class ElasticJobAgents(_Database): + _icon = "elastic-job-agents.png" + + +class InstancePools(_Database): + _icon = "instance-pools.png" + + +class ManagedDatabases(_Database): + _icon = "managed-databases.png" + + +class SQLDatabases(_Database): + _icon = "sql-databases.png" + + +class SQLDatawarehouse(_Database): + _icon = "sql-datawarehouse.png" + + +class SQLManagedInstances(_Database): + _icon = "sql-managed-instances.png" + + +class SQLServerStretchDatabases(_Database): + _icon = "sql-server-stretch-databases.png" + + +class SQLServers(_Database): + _icon = "sql-servers.png" + + +class SQLVM(_Database): + _icon = "sql-vm.png" + + +class SQL(_Database): + _icon = "sql.png" + + +class SsisLiftAndShiftIr(_Database): + _icon = "ssis-lift-and-shift-ir.png" + + +class SynapseAnalytics(_Database): + _icon = "synapse-analytics.png" + + +class VirtualClusters(_Database): + _icon = "virtual-clusters.png" + + +class VirtualDatacenter(_Database): + _icon = "virtual-datacenter.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/devops.py b/.venv/Lib/site-packages/diagrams/azure/devops.py new file mode 100644 index 00000000..eee9f253 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/devops.py @@ -0,0 +1,47 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Azure + + +class _Devops(_Azure): + _type = "devops" + _icon_dir = "resources/azure/devops" + + +class ApplicationInsights(_Devops): + _icon = "application-insights.png" + + +class Artifacts(_Devops): + _icon = "artifacts.png" + + +class Boards(_Devops): + _icon = "boards.png" + + +class Devops(_Devops): + _icon = "devops.png" + + +class DevtestLabs(_Devops): + _icon = "devtest-labs.png" + + +class LabServices(_Devops): + _icon = "lab-services.png" + + +class Pipelines(_Devops): + _icon = "pipelines.png" + + +class Repos(_Devops): + _icon = "repos.png" + + +class TestPlans(_Devops): + _icon = "test-plans.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/general.py b/.venv/Lib/site-packages/diagrams/azure/general.py new file mode 100644 index 00000000..8958e975 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/general.py @@ -0,0 +1,115 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Azure + + +class _General(_Azure): + _type = "general" + _icon_dir = "resources/azure/general" + + +class Allresources(_General): + _icon = "allresources.png" + + +class Azurehome(_General): + _icon = "azurehome.png" + + +class Developertools(_General): + _icon = "developertools.png" + + +class Helpsupport(_General): + _icon = "helpsupport.png" + + +class Information(_General): + _icon = "information.png" + + +class Managementgroups(_General): + _icon = "managementgroups.png" + + +class Marketplace(_General): + _icon = "marketplace.png" + + +class Quickstartcenter(_General): + _icon = "quickstartcenter.png" + + +class Recent(_General): + _icon = "recent.png" + + +class Reservations(_General): + _icon = "reservations.png" + + +class Resource(_General): + _icon = "resource.png" + + +class Resourcegroups(_General): + _icon = "resourcegroups.png" + + +class Servicehealth(_General): + _icon = "servicehealth.png" + + +class Shareddashboard(_General): + _icon = "shareddashboard.png" + + +class Subscriptions(_General): + _icon = "subscriptions.png" + + +class Support(_General): + _icon = "support.png" + + +class Supportrequests(_General): + _icon = "supportrequests.png" + + +class Tag(_General): + _icon = "tag.png" + + +class Tags(_General): + _icon = "tags.png" + + +class Templates(_General): + _icon = "templates.png" + + +class Twousericon(_General): + _icon = "twousericon.png" + + +class Userhealthicon(_General): + _icon = "userhealthicon.png" + + +class Usericon(_General): + _icon = "usericon.png" + + +class Userprivacy(_General): + _icon = "userprivacy.png" + + +class Userresource(_General): + _icon = "userresource.png" + + +class Whatsnew(_General): + _icon = "whatsnew.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/identity.py b/.venv/Lib/site-packages/diagrams/azure/identity.py new file mode 100644 index 00000000..a7913728 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/identity.py @@ -0,0 +1,71 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Azure + + +class _Identity(_Azure): + _type = "identity" + _icon_dir = "resources/azure/identity" + + +class AccessReview(_Identity): + _icon = "access-review.png" + + +class ActiveDirectoryConnectHealth(_Identity): + _icon = "active-directory-connect-health.png" + + +class ActiveDirectory(_Identity): + _icon = "active-directory.png" + + +class ADB2C(_Identity): + _icon = "ad-b2c.png" + + +class ADDomainServices(_Identity): + _icon = "ad-domain-services.png" + + +class ADIdentityProtection(_Identity): + _icon = "ad-identity-protection.png" + + +class ADPrivilegedIdentityManagement(_Identity): + _icon = "ad-privileged-identity-management.png" + + +class AppRegistrations(_Identity): + _icon = "app-registrations.png" + + +class ConditionalAccess(_Identity): + _icon = "conditional-access.png" + + +class EnterpriseApplications(_Identity): + _icon = "enterprise-applications.png" + + +class Groups(_Identity): + _icon = "groups.png" + + +class IdentityGovernance(_Identity): + _icon = "identity-governance.png" + + +class InformationProtection(_Identity): + _icon = "information-protection.png" + + +class ManagedIdentities(_Identity): + _icon = "managed-identities.png" + + +class Users(_Identity): + _icon = "users.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/integration.py b/.venv/Lib/site-packages/diagrams/azure/integration.py new file mode 100644 index 00000000..369cd97b --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/integration.py @@ -0,0 +1,87 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Azure + + +class _Integration(_Azure): + _type = "integration" + _icon_dir = "resources/azure/integration" + + +class APIForFhir(_Integration): + _icon = "api-for-fhir.png" + + +class APIManagement(_Integration): + _icon = "api-management.png" + + +class AppConfiguration(_Integration): + _icon = "app-configuration.png" + + +class DataCatalog(_Integration): + _icon = "data-catalog.png" + + +class EventGridDomains(_Integration): + _icon = "event-grid-domains.png" + + +class EventGridSubscriptions(_Integration): + _icon = "event-grid-subscriptions.png" + + +class EventGridTopics(_Integration): + _icon = "event-grid-topics.png" + + +class IntegrationAccounts(_Integration): + _icon = "integration-accounts.png" + + +class IntegrationServiceEnvironments(_Integration): + _icon = "integration-service-environments.png" + + +class LogicAppsCustomConnector(_Integration): + _icon = "logic-apps-custom-connector.png" + + +class LogicApps(_Integration): + _icon = "logic-apps.png" + + +class PartnerTopic(_Integration): + _icon = "partner-topic.png" + + +class SendgridAccounts(_Integration): + _icon = "sendgrid-accounts.png" + + +class ServiceBusRelays(_Integration): + _icon = "service-bus-relays.png" + + +class ServiceBus(_Integration): + _icon = "service-bus.png" + + +class ServiceCatalogManagedApplicationDefinitions(_Integration): + _icon = "service-catalog-managed-application-definitions.png" + + +class SoftwareAsAService(_Integration): + _icon = "software-as-a-service.png" + + +class StorsimpleDeviceManagers(_Integration): + _icon = "storsimple-device-managers.png" + + +class SystemTopic(_Integration): + _icon = "system-topic.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/iot.py b/.venv/Lib/site-packages/diagrams/azure/iot.py new file mode 100644 index 00000000..5d6dce8e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/iot.py @@ -0,0 +1,51 @@ +# This module is automatically generated by autogen.sh. 
DO NOT EDIT. + +from . import _Azure + + +class _Iot(_Azure): + _type = "iot" + _icon_dir = "resources/azure/iot" + + +class DeviceProvisioningServices(_Iot): + _icon = "device-provisioning-services.png" + + +class DigitalTwins(_Iot): + _icon = "digital-twins.png" + + +class IotCentralApplications(_Iot): + _icon = "iot-central-applications.png" + + +class IotHubSecurity(_Iot): + _icon = "iot-hub-security.png" + + +class IotHub(_Iot): + _icon = "iot-hub.png" + + +class Maps(_Iot): + _icon = "maps.png" + + +class Sphere(_Iot): + _icon = "sphere.png" + + +class TimeSeriesInsightsEnvironments(_Iot): + _icon = "time-series-insights-environments.png" + + +class TimeSeriesInsightsEventsSources(_Iot): + _icon = "time-series-insights-events-sources.png" + + +class Windows10IotCoreServices(_Iot): + _icon = "windows-10-iot-core-services.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/migration.py b/.venv/Lib/site-packages/diagrams/azure/migration.py new file mode 100644 index 00000000..74f573f7 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/migration.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Azure + + +class _Migration(_Azure): + _type = "migration" + _icon_dir = "resources/azure/migration" + + +class DataBoxEdge(_Migration): + _icon = "data-box-edge.png" + + +class DataBox(_Migration): + _icon = "data-box.png" + + +class DatabaseMigrationServices(_Migration): + _icon = "database-migration-services.png" + + +class MigrationProjects(_Migration): + _icon = "migration-projects.png" + + +class RecoveryServicesVaults(_Migration): + _icon = "recovery-services-vaults.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/ml.py b/.venv/Lib/site-packages/diagrams/azure/ml.py new file mode 100644 index 00000000..eb0c474e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/ml.py @@ -0,0 +1,51 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Azure + + +class _Ml(_Azure): + _type = "ml" + _icon_dir = "resources/azure/ml" + + +class AzureOpenAI(_Ml): + _icon = "azure-open-ai.png" + + +class AzureSpeedToText(_Ml): + _icon = "azure-speed-to-text.png" + + +class BatchAI(_Ml): + _icon = "batch-ai.png" + + +class BotServices(_Ml): + _icon = "bot-services.png" + + +class CognitiveServices(_Ml): + _icon = "cognitive-services.png" + + +class GenomicsAccounts(_Ml): + _icon = "genomics-accounts.png" + + +class MachineLearningServiceWorkspaces(_Ml): + _icon = "machine-learning-service-workspaces.png" + + +class MachineLearningStudioWebServicePlans(_Ml): + _icon = "machine-learning-studio-web-service-plans.png" + + +class MachineLearningStudioWebServices(_Ml): + _icon = "machine-learning-studio-web-services.png" + + +class MachineLearningStudioWorkspaces(_Ml): + _icon = "machine-learning-studio-workspaces.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/mobile.py b/.venv/Lib/site-packages/diagrams/azure/mobile.py new file mode 100644 index 00000000..e2eb20a5 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/mobile.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Azure + + +class _Mobile(_Azure): + _type = "mobile" + _icon_dir = "resources/azure/mobile" + + +class AppServiceMobile(_Mobile): + _icon = "app-service-mobile.png" + + +class MobileEngagement(_Mobile): + _icon = "mobile-engagement.png" + + +class NotificationHubs(_Mobile): + _icon = "notification-hubs.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/monitor.py b/.venv/Lib/site-packages/diagrams/azure/monitor.py new file mode 100644 index 00000000..9dcc5bb0 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/monitor.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Azure + + +class _Monitor(_Azure): + _type = "monitor" + _icon_dir = "resources/azure/monitor" + + +class ChangeAnalysis(_Monitor): + _icon = "change-analysis.png" + + +class Logs(_Monitor): + _icon = "logs.png" + + +class Metrics(_Monitor): + _icon = "metrics.png" + + +class Monitor(_Monitor): + _icon = "monitor.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/network.py b/.venv/Lib/site-packages/diagrams/azure/network.py new file mode 100644 index 00000000..d55ad3cc --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/network.py @@ -0,0 +1,123 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Azure + + +class _Network(_Azure): + _type = "network" + _icon_dir = "resources/azure/network" + + +class ApplicationGateway(_Network): + _icon = "application-gateway.png" + + +class ApplicationSecurityGroups(_Network): + _icon = "application-security-groups.png" + + +class CDNProfiles(_Network): + _icon = "cdn-profiles.png" + + +class Connections(_Network): + _icon = "connections.png" + + +class DDOSProtectionPlans(_Network): + _icon = "ddos-protection-plans.png" + + +class DNSPrivateZones(_Network): + _icon = "dns-private-zones.png" + + +class DNSZones(_Network): + _icon = "dns-zones.png" + + +class ExpressrouteCircuits(_Network): + _icon = "expressroute-circuits.png" + + +class Firewall(_Network): + _icon = "firewall.png" + + +class FrontDoors(_Network): + _icon = "front-doors.png" + + +class LoadBalancers(_Network): + _icon = "load-balancers.png" + + +class LocalNetworkGateways(_Network): + _icon = "local-network-gateways.png" + + +class NetworkInterfaces(_Network): + _icon = "network-interfaces.png" + + +class NetworkSecurityGroupsClassic(_Network): + _icon = "network-security-groups-classic.png" + + +class NetworkWatcher(_Network): + _icon = "network-watcher.png" + + +class OnPremisesDataGateways(_Network): + _icon = "on-premises-data-gateways.png" + + +class PrivateEndpoint(_Network): + _icon = "private-endpoint.png" + + +class PublicIpAddresses(_Network): + _icon = "public-ip-addresses.png" + + +class ReservedIpAddressesClassic(_Network): + _icon = "reserved-ip-addresses-classic.png" + + +class RouteFilters(_Network): + _icon = "route-filters.png" + + +class RouteTables(_Network): + _icon = "route-tables.png" + + +class ServiceEndpointPolicies(_Network): + _icon = "service-endpoint-policies.png" + + +class Subnets(_Network): + _icon = "subnets.png" + + +class TrafficManagerProfiles(_Network): + _icon = "traffic-manager-profiles.png" + + +class VirtualNetworkClassic(_Network): + _icon = "virtual-network-classic.png" + + +class VirtualNetworkGateways(_Network): + _icon = "virtual-network-gateways.png" + + +class VirtualNetworks(_Network): + _icon = "virtual-networks.png" + + +class VirtualWans(_Network): + _icon = "virtual-wans.png" + + +# Aliases diff --git 
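The Azure node classes above are thin `Node` subclasses, so they compose directly with the core `Diagram`/`Cluster` API. A minimal illustrative sketch, not part of the vendored files, assuming the `diagrams` package and Graphviz are installed; the resource names are invented:

```python
from diagrams import Cluster, Diagram
from diagrams.azure.compute import AKS
from diagrams.azure.database import SQLDatabases
from diagrams.azure.network import ApplicationGateway

# Renders azure_sketch.png without opening a viewer; labels are placeholders.
with Diagram("Azure sketch", show=False):
    gateway = ApplicationGateway("app gateway")
    with Cluster("workload"):
        cluster = AKS("aks")
    gateway >> cluster >> SQLDatabases("orders db")
```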
a/.venv/Lib/site-packages/diagrams/azure/security.py b/.venv/Lib/site-packages/diagrams/azure/security.py new file mode 100644 index 00000000..f78fb51d --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/security.py @@ -0,0 +1,39 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Azure + + +class _Security(_Azure): + _type = "security" + _icon_dir = "resources/azure/security" + + +class ApplicationSecurityGroups(_Security): + _icon = "application-security-groups.png" + + +class ConditionalAccess(_Security): + _icon = "conditional-access.png" + + +class Defender(_Security): + _icon = "defender.png" + + +class ExtendedSecurityUpdates(_Security): + _icon = "extended-security-updates.png" + + +class KeyVaults(_Security): + _icon = "key-vaults.png" + + +class SecurityCenter(_Security): + _icon = "security-center.png" + + +class Sentinel(_Security): + _icon = "sentinel.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/storage.py b/.venv/Lib/site-packages/diagrams/azure/storage.py new file mode 100644 index 00000000..5dcf1f2c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/storage.py @@ -0,0 +1,75 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Azure + + +class _Storage(_Azure): + _type = "storage" + _icon_dir = "resources/azure/storage" + + +class ArchiveStorage(_Storage): + _icon = "archive-storage.png" + + +class Azurefxtedgefiler(_Storage): + _icon = "azurefxtedgefiler.png" + + +class BlobStorage(_Storage): + _icon = "blob-storage.png" + + +class DataBoxEdgeDataBoxGateway(_Storage): + _icon = "data-box-edge-data-box-gateway.png" + + +class DataBox(_Storage): + _icon = "data-box.png" + + +class DataLakeStorage(_Storage): + _icon = "data-lake-storage.png" + + +class GeneralStorage(_Storage): + _icon = "general-storage.png" + + +class NetappFiles(_Storage): + _icon = "netapp-files.png" + + +class QueuesStorage(_Storage): + _icon = "queues-storage.png" + + +class StorageAccountsClassic(_Storage): + _icon = "storage-accounts-classic.png" + + +class StorageAccounts(_Storage): + _icon = "storage-accounts.png" + + +class StorageExplorer(_Storage): + _icon = "storage-explorer.png" + + +class StorageSyncServices(_Storage): + _icon = "storage-sync-services.png" + + +class StorsimpleDataManagers(_Storage): + _icon = "storsimple-data-managers.png" + + +class StorsimpleDeviceManagers(_Storage): + _icon = "storsimple-device-managers.png" + + +class TableStorage(_Storage): + _icon = "table-storage.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/azure/web.py b/.venv/Lib/site-packages/diagrams/azure/web.py new file mode 100644 index 00000000..b99a61cc --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/azure/web.py @@ -0,0 +1,51 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Azure + + +class _Web(_Azure): + _type = "web" + _icon_dir = "resources/azure/web" + + +class APIConnections(_Web): + _icon = "api-connections.png" + + +class AppServiceCertificates(_Web): + _icon = "app-service-certificates.png" + + +class AppServiceDomains(_Web): + _icon = "app-service-domains.png" + + +class AppServiceEnvironments(_Web): + _icon = "app-service-environments.png" + + +class AppServicePlans(_Web): + _icon = "app-service-plans.png" + + +class AppServices(_Web): + _icon = "app-services.png" + + +class MediaServices(_Web): + _icon = "media-services.png" + + +class NotificationHubNamespaces(_Web): + _icon = "notification-hub-namespaces.png" + + +class Search(_Web): + _icon = "search.png" + + +class Signalr(_Web): + _icon = "signalr.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/base/__init__.py b/.venv/Lib/site-packages/diagrams/base/__init__.py new file mode 100644 index 00000000..8180b7c4 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/base/__init__.py @@ -0,0 +1,12 @@ +""" +Base provides a set of general services for backend infrastructure. +""" + +from diagrams import Node + + +class _Base(Node): + _provider = "base" + _icon_dir = "resources/base" + + fontcolor = "#ffffff" diff --git a/.venv/Lib/site-packages/diagrams/c4/__init__.py b/.venv/Lib/site-packages/diagrams/c4/__init__.py new file mode 100644 index 00000000..9bc2bf98 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/c4/__init__.py @@ -0,0 +1,128 @@ +""" +A set of nodes and edges to visualize software architecture using the C4 model. +""" + +import html +import textwrap + +from diagrams import Cluster, Edge, Node + + +def _format_node_label(name, key, description): + """Create a graphviz label string for a C4 node""" + title = f'{html.escape(name)}
' + subtitle = f'<font point-size="9">[{html.escape(key)}]<br/></font>' if key else "" + text = f'<br/><font point-size="10">{_format_description(description)}</font>' if description else "" + return f"<{title}{subtitle}{text}>" + + +def _format_description(description): + """ + Formats the description string so it fits into the C4 nodes. + + It line-breaks the description so it fits onto exactly three lines. If there are more + than three lines, all further lines are discarded and "..." inserted on the last line to + indicate that it was shortened. This will also html-escape the description so it can + safely be included in a HTML label. + """ + wrapper = textwrap.TextWrapper(width=40, max_lines=3) + lines = [html.escape(line) for line in wrapper.wrap(description)] + # fill up with empty lines so it is always three + lines += [""] * (3 - len(lines)) + return "<br/>".join(lines) + + +def _format_edge_label(description): + """Create a graphviz label string for a C4 edge""" + wrapper = textwrap.TextWrapper(width=24, max_lines=3) + lines = [html.escape(line) for line in wrapper.wrap(description)] + text = "<br/>
".join(lines) + return f'<{text}>' + + +def C4Node(name, technology="", description="", type="Container", **kwargs): + key = f"{type}: {technology}" if technology else type + node_attributes = { + "label": _format_node_label(name, key, description), + "labelloc": "c", + "shape": "rect", + "width": "2.6", + "height": "1.6", + "fixedsize": "true", + "style": "filled", + "fillcolor": "dodgerblue3", + "fontcolor": "white", + } + # collapse boxes to a smaller form if they don't have a description + if not description: + node_attributes.update({"width": "2", "height": "1"}) + node_attributes.update(kwargs) + return Node(**node_attributes) + + +def Container(name, technology="", description="", **kwargs): + container_attributes = { + "name": name, + "technology": technology, + "description": description, + "type": "Container", + } + container_attributes.update(kwargs) + return C4Node(**container_attributes) + + +def Database(name, technology="", description="", **kwargs): + database_attributes = { + "name": name, + "technology": technology, + "description": description, + "type": "Database", + "shape": "cylinder", + "labelloc": "b", + } + database_attributes.update(kwargs) + return C4Node(**database_attributes) + + +def System(name, description="", external=False, **kwargs): + system_attributes = { + "name": name, + "description": description, + "type": "External System" if external else "System", + "fillcolor": "gray60" if external else "dodgerblue4", + } + system_attributes.update(kwargs) + return C4Node(**system_attributes) + + +def Person(name, description="", external=False, **kwargs): + person_attributes = { + "name": name, + "description": description, + "type": "External Person" if external else "Person", + "fillcolor": "gray60" if external else "dodgerblue4", + "style": "rounded,filled", + } + person_attributes.update(kwargs) + return C4Node(**person_attributes) + + +def SystemBoundary(name, **kwargs): + graph_attributes = { + "label": html.escape(name), + "bgcolor": "white", + "margin": "16", + "style": "dashed", + } + graph_attributes.update(kwargs) + return Cluster(name, graph_attr=graph_attributes) + + +def Relationship(label="", **kwargs): + edge_attributes = { + "style": "dashed", + "color": "gray60", + "label": _format_edge_label(label) if label else "", + } + edge_attributes.update(kwargs) + return Edge(**edge_attributes) diff --git a/.venv/Lib/site-packages/diagrams/custom/__init__.py b/.venv/Lib/site-packages/diagrams/custom/__init__.py new file mode 100644 index 00000000..9845932d --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/custom/__init__.py @@ -0,0 +1,20 @@ +""" +Custom provides the possibility of load an image to be presented as a node. +""" + +from diagrams import Node + + +class Custom(Node): + _provider = "custom" + _type = "custom" + _icon_dir = None + + fontcolor = "#ffffff" + + def _load_icon(self): + return self._icon + + def __init__(self, label, icon_path, *args, **kwargs): + self._icon = icon_path + super().__init__(label, *args, **kwargs) diff --git a/.venv/Lib/site-packages/diagrams/digitalocean/__init__.py b/.venv/Lib/site-packages/diagrams/digitalocean/__init__.py new file mode 100644 index 00000000..e9557d1b --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/digitalocean/__init__.py @@ -0,0 +1,16 @@ +""" +DigitalOcean provides a set of services for DigitalOcean provider. 
+""" + +from diagrams import Node + + +class _DigitalOcean(Node): + _provider = "digitalocean" + _icon_dir = "resources/digitalocean" + + fontcolor = "#ffffff" + + +class DigitalOcean(_DigitalOcean): + _icon = "digitalocean.png" diff --git a/.venv/Lib/site-packages/diagrams/digitalocean/compute.py b/.venv/Lib/site-packages/diagrams/digitalocean/compute.py new file mode 100644 index 00000000..2f732004 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/digitalocean/compute.py @@ -0,0 +1,43 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _DigitalOcean + + +class _Compute(_DigitalOcean): + _type = "compute" + _icon_dir = "resources/digitalocean/compute" + + +class Containers(_Compute): + _icon = "containers.png" + + +class Docker(_Compute): + _icon = "docker.png" + + +class DropletConnect(_Compute): + _icon = "droplet-connect.png" + + +class DropletSnapshot(_Compute): + _icon = "droplet-snapshot.png" + + +class Droplet(_Compute): + _icon = "droplet.png" + + +class K8SCluster(_Compute): + _icon = "k8s-cluster.png" + + +class K8SNodePool(_Compute): + _icon = "k8s-node-pool.png" + + +class K8SNode(_Compute): + _icon = "k8s-node.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/digitalocean/database.py b/.venv/Lib/site-packages/diagrams/digitalocean/database.py new file mode 100644 index 00000000..8ddc18c9 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/digitalocean/database.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _DigitalOcean + + +class _Database(_DigitalOcean): + _type = "database" + _icon_dir = "resources/digitalocean/database" + + +class DbaasPrimaryStandbyMore(_Database): + _icon = "dbaas-primary-standby-more.png" + + +class DbaasPrimary(_Database): + _icon = "dbaas-primary.png" + + +class DbaasReadOnly(_Database): + _icon = "dbaas-read-only.png" + + +class DbaasStandby(_Database): + _icon = "dbaas-standby.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/digitalocean/network.py b/.venv/Lib/site-packages/diagrams/digitalocean/network.py new file mode 100644 index 00000000..901529ca --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/digitalocean/network.py @@ -0,0 +1,47 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _DigitalOcean + + +class _Network(_DigitalOcean): + _type = "network" + _icon_dir = "resources/digitalocean/network" + + +class Certificate(_Network): + _icon = "certificate.png" + + +class DomainRegistration(_Network): + _icon = "domain-registration.png" + + +class Domain(_Network): + _icon = "domain.png" + + +class Firewall(_Network): + _icon = "firewall.png" + + +class FloatingIp(_Network): + _icon = "floating-ip.png" + + +class InternetGateway(_Network): + _icon = "internet-gateway.png" + + +class LoadBalancer(_Network): + _icon = "load-balancer.png" + + +class ManagedVpn(_Network): + _icon = "managed-vpn.png" + + +class Vpc(_Network): + _icon = "vpc.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/digitalocean/storage.py b/.venv/Lib/site-packages/diagrams/digitalocean/storage.py new file mode 100644 index 00000000..a2c5bc9e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/digitalocean/storage.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _DigitalOcean + + +class _Storage(_DigitalOcean): + _type = "storage" + _icon_dir = "resources/digitalocean/storage" + + +class Folder(_Storage): + _icon = "folder.png" + + +class Space(_Storage): + _icon = "space.png" + + +class VolumeSnapshot(_Storage): + _icon = "volume-snapshot.png" + + +class Volume(_Storage): + _icon = "volume.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/elastic/__init__.py b/.venv/Lib/site-packages/diagrams/elastic/__init__.py new file mode 100644 index 00000000..17b86961 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/elastic/__init__.py @@ -0,0 +1,16 @@ +""" +Elastic provides a set of general elastic services. +""" + +from diagrams import Node + + +class _Elastic(Node): + _provider = "elastic" + _icon_dir = "resources/elastic" + + fontcolor = "#ffffff" + + +class Elastic(_Elastic): + _icon = "elastic.png" diff --git a/.venv/Lib/site-packages/diagrams/elastic/agent.py b/.venv/Lib/site-packages/diagrams/elastic/agent.py new file mode 100644 index 00000000..518d728d --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/elastic/agent.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Elastic + + +class _Agent(_Elastic): + _type = "agent" + _icon_dir = "resources/elastic/agent" + + +class Agent(_Agent): + _icon = "agent.png" + + +class Endpoint(_Agent): + _icon = "endpoint.png" + + +class Fleet(_Agent): + _icon = "fleet.png" + + +class Integrations(_Agent): + _icon = "integrations.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/elastic/beats.py b/.venv/Lib/site-packages/diagrams/elastic/beats.py new file mode 100644 index 00000000..f4a59600 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/elastic/beats.py @@ -0,0 +1,43 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Elastic + + +class _Beats(_Elastic): + _type = "beats" + _icon_dir = "resources/elastic/beats" + + +class APM(_Beats): + _icon = "apm.png" + + +class Auditbeat(_Beats): + _icon = "auditbeat.png" + + +class Filebeat(_Beats): + _icon = "filebeat.png" + + +class Functionbeat(_Beats): + _icon = "functionbeat.png" + + +class Heartbeat(_Beats): + _icon = "heartbeat.png" + + +class Metricbeat(_Beats): + _icon = "metricbeat.png" + + +class Packetbeat(_Beats): + _icon = "packetbeat.png" + + +class Winlogbeat(_Beats): + _icon = "winlogbeat.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/elastic/elasticsearch.py b/.venv/Lib/site-packages/diagrams/elastic/elasticsearch.py new file mode 100644 index 00000000..6234eb9c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/elastic/elasticsearch.py @@ -0,0 +1,71 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Elastic + + +class _Elasticsearch(_Elastic): + _type = "elasticsearch" + _icon_dir = "resources/elastic/elasticsearch" + + +class Alerting(_Elasticsearch): + _icon = "alerting.png" + + +class Beats(_Elasticsearch): + _icon = "beats.png" + + +class Elasticsearch(_Elasticsearch): + _icon = "elasticsearch.png" + + +class Kibana(_Elasticsearch): + _icon = "kibana.png" + + +class LogstashPipeline(_Elasticsearch): + _icon = "logstash-pipeline.png" + + +class Logstash(_Elasticsearch): + _icon = "logstash.png" + + +class MachineLearning(_Elasticsearch): + _icon = "machine-learning.png" + + +class MapServices(_Elasticsearch): + _icon = "map-services.png" + + +class Maps(_Elasticsearch): + _icon = "maps.png" + + +class Monitoring(_Elasticsearch): + _icon = "monitoring.png" + + +class SearchableSnapshots(_Elasticsearch): + _icon = "searchable-snapshots.png" + + +class SecuritySettings(_Elasticsearch): + _icon = "security-settings.png" + + +class SQL(_Elasticsearch): + _icon = "sql.png" + + +class Stack(_Elasticsearch): + _icon = "stack.png" + + +# Aliases + +ElasticSearch = Elasticsearch +LogStash = Logstash +ML = MachineLearning diff --git a/.venv/Lib/site-packages/diagrams/elastic/enterprisesearch.py b/.venv/Lib/site-packages/diagrams/elastic/enterprisesearch.py new file mode 100644 index 00000000..cde3ca60 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/elastic/enterprisesearch.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Elastic + + +class _Enterprisesearch(_Elastic): + _type = "enterprisesearch" + _icon_dir = "resources/elastic/enterprisesearch" + + +class AppSearch(_Enterprisesearch): + _icon = "app-search.png" + + +class Crawler(_Enterprisesearch): + _icon = "crawler.png" + + +class EnterpriseSearch(_Enterprisesearch): + _icon = "enterprise-search.png" + + +class SiteSearch(_Enterprisesearch): + _icon = "site-search.png" + + +class WorkplaceSearch(_Enterprisesearch): + _icon = "workplace-search.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/elastic/observability.py b/.venv/Lib/site-packages/diagrams/elastic/observability.py new file mode 100644 index 00000000..3001340d --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/elastic/observability.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Elastic + + +class _Observability(_Elastic): + _type = "observability" + _icon_dir = "resources/elastic/observability" + + +class APM(_Observability): + _icon = "apm.png" + + +class Logs(_Observability): + _icon = "logs.png" + + +class Metrics(_Observability): + _icon = "metrics.png" + + +class Observability(_Observability): + _icon = "observability.png" + + +class Uptime(_Observability): + _icon = "uptime.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/elastic/orchestration.py b/.venv/Lib/site-packages/diagrams/elastic/orchestration.py new file mode 100644 index 00000000..99084cc7 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/elastic/orchestration.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Elastic + + +class _Orchestration(_Elastic): + _type = "orchestration" + _icon_dir = "resources/elastic/orchestration" + + +class ECE(_Orchestration): + _icon = "ece.png" + + +class ECK(_Orchestration): + _icon = "eck.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/elastic/saas.py b/.venv/Lib/site-packages/diagrams/elastic/saas.py new file mode 100644 index 00000000..4821d62f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/elastic/saas.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Elastic + + +class _Saas(_Elastic): + _type = "saas" + _icon_dir = "resources/elastic/saas" + + +class Cloud(_Saas): + _icon = "cloud.png" + + +class Elastic(_Saas): + _icon = "elastic.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/elastic/security.py b/.venv/Lib/site-packages/diagrams/elastic/security.py new file mode 100644 index 00000000..bf907c32 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/elastic/security.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Elastic + + +class _Security(_Elastic): + _type = "security" + _icon_dir = "resources/elastic/security" + + +class Endpoint(_Security): + _icon = "endpoint.png" + + +class Security(_Security): + _icon = "security.png" + + +class SIEM(_Security): + _icon = "siem.png" + + +class Xdr(_Security): + _icon = "xdr.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/firebase/__init__.py b/.venv/Lib/site-packages/diagrams/firebase/__init__.py new file mode 100644 index 00000000..11c65802 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/firebase/__init__.py @@ -0,0 +1,16 @@ +""" +Firebase provides a set of services for Firebase provider. +""" + +from diagrams import Node + + +class _Firebase(Node): + _provider = "firebase" + _icon_dir = "resources/firebase" + + fontcolor = "#ffffff" + + +class Firebase(_Firebase): + _icon = "firebase.png" diff --git a/.venv/Lib/site-packages/diagrams/firebase/base.py b/.venv/Lib/site-packages/diagrams/firebase/base.py new file mode 100644 index 00000000..a7c9531c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/firebase/base.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Firebase + + +class _Base(_Firebase): + _type = "base" + _icon_dir = "resources/firebase/base" + + +class Firebase(_Base): + _icon = "firebase.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/firebase/develop.py b/.venv/Lib/site-packages/diagrams/firebase/develop.py new file mode 100644 index 00000000..27426e51 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/firebase/develop.py @@ -0,0 +1,39 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
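The Elastic modules above expose one class per product icon; a short illustrative ingest pipeline, again assuming `diagrams` and Graphviz are available:

```python
from diagrams import Diagram
from diagrams.elastic.beats import Filebeat, Metricbeat
from diagrams.elastic.elasticsearch import Elasticsearch, Kibana, Logstash

# Shippers feed a Logstash pipeline, which indexes into Elasticsearch for Kibana.
with Diagram("Elastic sketch", show=False):
    [Filebeat("logs"), Metricbeat("metrics")] >> Logstash("pipeline") \
        >> Elasticsearch("cluster") >> Kibana("dashboards")
```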
import _Firebase + + +class _Develop(_Firebase): + _type = "develop" + _icon_dir = "resources/firebase/develop" + + +class Authentication(_Develop): + _icon = "authentication.png" + + +class Firestore(_Develop): + _icon = "firestore.png" + + +class Functions(_Develop): + _icon = "functions.png" + + +class Hosting(_Develop): + _icon = "hosting.png" + + +class MLKit(_Develop): + _icon = "ml-kit.png" + + +class RealtimeDatabase(_Develop): + _icon = "realtime-database.png" + + +class Storage(_Develop): + _icon = "storage.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/firebase/extentions.py b/.venv/Lib/site-packages/diagrams/firebase/extentions.py new file mode 100644 index 00000000..9d5926c2 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/firebase/extentions.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Firebase + + +class _Extentions(_Firebase): + _type = "extentions" + _icon_dir = "resources/firebase/extentions" + + +class Extensions(_Extentions): + _icon = "extensions.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/firebase/grow.py b/.venv/Lib/site-packages/diagrams/firebase/grow.py new file mode 100644 index 00000000..b015dd3f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/firebase/grow.py @@ -0,0 +1,45 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Firebase + + +class _Grow(_Firebase): + _type = "grow" + _icon_dir = "resources/firebase/grow" + + +class ABTesting(_Grow): + _icon = "ab-testing.png" + + +class AppIndexing(_Grow): + _icon = "app-indexing.png" + + +class DynamicLinks(_Grow): + _icon = "dynamic-links.png" + + +class InAppMessaging(_Grow): + _icon = "in-app-messaging.png" + + +class Invites(_Grow): + _icon = "invites.png" + + +class Messaging(_Grow): + _icon = "messaging.png" + + +class Predictions(_Grow): + _icon = "predictions.png" + + +class RemoteConfig(_Grow): + _icon = "remote-config.png" + + +# Aliases + +FCM = Messaging diff --git a/.venv/Lib/site-packages/diagrams/firebase/quality.py b/.venv/Lib/site-packages/diagrams/firebase/quality.py new file mode 100644 index 00000000..227a4c35 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/firebase/quality.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Firebase + + +class _Quality(_Firebase): + _type = "quality" + _icon_dir = "resources/firebase/quality" + + +class AppDistribution(_Quality): + _icon = "app-distribution.png" + + +class CrashReporting(_Quality): + _icon = "crash-reporting.png" + + +class Crashlytics(_Quality): + _icon = "crashlytics.png" + + +class PerformanceMonitoring(_Quality): + _icon = "performance-monitoring.png" + + +class TestLab(_Quality): + _icon = "test-lab.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gcp/__init__.py b/.venv/Lib/site-packages/diagrams/gcp/__init__.py new file mode 100644 index 00000000..2ca41aed --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/__init__.py @@ -0,0 +1,16 @@ +""" +GCP provides a set of services for Google Cloud Platform provider. 
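The Firebase nodes above (including the `FCM` alias for `Messaging`) wire up the same way; a hedged sketch with invented labels:

```python
from diagrams import Diagram
from diagrams.firebase.develop import Authentication, Firestore, Functions, Hosting
from diagrams.firebase.grow import FCM

with Diagram("Firebase sketch", show=False):
    app = Hosting("web app")
    api = Functions("api")
    app >> Authentication("sign-in")
    app >> api >> Firestore("documents")
    api >> FCM("push notifications")
```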
+""" + +from diagrams import Node + + +class _GCP(Node): + _provider = "gcp" + _icon_dir = "resources/gcp" + + fontcolor = "#2d3436" + + +class GCP(_GCP): + _icon = "gcp.png" diff --git a/.venv/Lib/site-packages/diagrams/gcp/analytics.py b/.venv/Lib/site-packages/diagrams/gcp/analytics.py new file mode 100644 index 00000000..25395466 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/analytics.py @@ -0,0 +1,54 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GCP + + +class _Analytics(_GCP): + _type = "analytics" + _icon_dir = "resources/gcp/analytics" + + +class Bigquery(_Analytics): + _icon = "bigquery.png" + + +class Composer(_Analytics): + _icon = "composer.png" + + +class DataCatalog(_Analytics): + _icon = "data-catalog.png" + + +class DataFusion(_Analytics): + _icon = "data-fusion.png" + + +class Dataflow(_Analytics): + _icon = "dataflow.png" + + +class Datalab(_Analytics): + _icon = "datalab.png" + + +class Dataprep(_Analytics): + _icon = "dataprep.png" + + +class Dataproc(_Analytics): + _icon = "dataproc.png" + + +class Genomics(_Analytics): + _icon = "genomics.png" + + +class Pubsub(_Analytics): + _icon = "pubsub.png" + + +# Aliases + +BigQuery = Bigquery +PubSub = Pubsub diff --git a/.venv/Lib/site-packages/diagrams/gcp/api.py b/.venv/Lib/site-packages/diagrams/gcp/api.py new file mode 100644 index 00000000..45ec2e09 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/api.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GCP + + +class _API(_GCP): + _type = "api" + _icon_dir = "resources/gcp/api" + + +class APIGateway(_API): + _icon = "api-gateway.png" + + +class Apigee(_API): + _icon = "apigee.png" + + +class Endpoints(_API): + _icon = "endpoints.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gcp/compute.py b/.venv/Lib/site-packages/diagrams/gcp/compute.py new file mode 100644 index 00000000..6225de03 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/compute.py @@ -0,0 +1,48 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GCP + + +class _Compute(_GCP): + _type = "compute" + _icon_dir = "resources/gcp/compute" + + +class AppEngine(_Compute): + _icon = "app-engine.png" + + +class ComputeEngine(_Compute): + _icon = "compute-engine.png" + + +class ContainerOptimizedOS(_Compute): + _icon = "container-optimized-os.png" + + +class Functions(_Compute): + _icon = "functions.png" + + +class GKEOnPrem(_Compute): + _icon = "gke-on-prem.png" + + +class GPU(_Compute): + _icon = "gpu.png" + + +class KubernetesEngine(_Compute): + _icon = "kubernetes-engine.png" + + +class Run(_Compute): + _icon = "run.png" + + +# Aliases + +GAE = AppEngine +GCF = Functions +GCE = ComputeEngine +GKE = KubernetesEngine diff --git a/.venv/Lib/site-packages/diagrams/gcp/database.py b/.venv/Lib/site-packages/diagrams/gcp/database.py new file mode 100644 index 00000000..36851ae4 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/database.py @@ -0,0 +1,37 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _GCP + + +class _Database(_GCP): + _type = "database" + _icon_dir = "resources/gcp/database" + + +class Bigtable(_Database): + _icon = "bigtable.png" + + +class Datastore(_Database): + _icon = "datastore.png" + + +class Firestore(_Database): + _icon = "firestore.png" + + +class Memorystore(_Database): + _icon = "memorystore.png" + + +class Spanner(_Database): + _icon = "spanner.png" + + +class SQL(_Database): + _icon = "sql.png" + + +# Aliases + +BigTable = Bigtable diff --git a/.venv/Lib/site-packages/diagrams/gcp/devtools.py b/.venv/Lib/site-packages/diagrams/gcp/devtools.py new file mode 100644 index 00000000..a22d209e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/devtools.py @@ -0,0 +1,73 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GCP + + +class _Devtools(_GCP): + _type = "devtools" + _icon_dir = "resources/gcp/devtools" + + +class Build(_Devtools): + _icon = "build.png" + + +class CodeForIntellij(_Devtools): + _icon = "code-for-intellij.png" + + +class Code(_Devtools): + _icon = "code.png" + + +class ContainerRegistry(_Devtools): + _icon = "container-registry.png" + + +class GradleAppEnginePlugin(_Devtools): + _icon = "gradle-app-engine-plugin.png" + + +class IdePlugins(_Devtools): + _icon = "ide-plugins.png" + + +class MavenAppEnginePlugin(_Devtools): + _icon = "maven-app-engine-plugin.png" + + +class Scheduler(_Devtools): + _icon = "scheduler.png" + + +class SDK(_Devtools): + _icon = "sdk.png" + + +class SourceRepositories(_Devtools): + _icon = "source-repositories.png" + + +class Tasks(_Devtools): + _icon = "tasks.png" + + +class TestLab(_Devtools): + _icon = "test-lab.png" + + +class ToolsForEclipse(_Devtools): + _icon = "tools-for-eclipse.png" + + +class ToolsForPowershell(_Devtools): + _icon = "tools-for-powershell.png" + + +class ToolsForVisualStudio(_Devtools): + _icon = "tools-for-visual-studio.png" + + +# Aliases + +GCR = ContainerRegistry diff --git a/.venv/Lib/site-packages/diagrams/gcp/iot.py b/.venv/Lib/site-packages/diagrams/gcp/iot.py new file mode 100644 index 00000000..9a792cd6 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/iot.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GCP + + +class _Iot(_GCP): + _type = "iot" + _icon_dir = "resources/gcp/iot" + + +class IotCore(_Iot): + _icon = "iot-core.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gcp/migration.py b/.venv/Lib/site-packages/diagrams/gcp/migration.py new file mode 100644 index 00000000..8929cf71 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/migration.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GCP + + +class _Migration(_GCP): + _type = "migration" + _icon_dir = "resources/gcp/migration" + + +class TransferAppliance(_Migration): + _icon = "transfer-appliance.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gcp/ml.py b/.venv/Lib/site-packages/diagrams/gcp/ml.py new file mode 100644 index 00000000..080067db --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/ml.py @@ -0,0 +1,100 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _GCP + + +class _ML(_GCP): + _type = "ml" + _icon_dir = "resources/gcp/ml" + + +class AdvancedSolutionsLab(_ML): + _icon = "advanced-solutions-lab.png" + + +class AIHub(_ML): + _icon = "ai-hub.png" + + +class AIPlatformDataLabelingService(_ML): + _icon = "ai-platform-data-labeling-service.png" + + +class AIPlatform(_ML): + _icon = "ai-platform.png" + + +class AutomlNaturalLanguage(_ML): + _icon = "automl-natural-language.png" + + +class AutomlTables(_ML): + _icon = "automl-tables.png" + + +class AutomlTranslation(_ML): + _icon = "automl-translation.png" + + +class AutomlVideoIntelligence(_ML): + _icon = "automl-video-intelligence.png" + + +class AutomlVision(_ML): + _icon = "automl-vision.png" + + +class Automl(_ML): + _icon = "automl.png" + + +class DialogFlowEnterpriseEdition(_ML): + _icon = "dialog-flow-enterprise-edition.png" + + +class InferenceAPI(_ML): + _icon = "inference-api.png" + + +class JobsAPI(_ML): + _icon = "jobs-api.png" + + +class NaturalLanguageAPI(_ML): + _icon = "natural-language-api.png" + + +class RecommendationsAI(_ML): + _icon = "recommendations-ai.png" + + +class SpeechToText(_ML): + _icon = "speech-to-text.png" + + +class TextToSpeech(_ML): + _icon = "text-to-speech.png" + + +class TPU(_ML): + _icon = "tpu.png" + + +class TranslationAPI(_ML): + _icon = "translation-api.png" + + +class VideoIntelligenceAPI(_ML): + _icon = "video-intelligence-api.png" + + +class VisionAPI(_ML): + _icon = "vision-api.png" + + +# Aliases + +AutoML = Automl +NLAPI = NaturalLanguageAPI +STT = SpeechToText +TTS = TextToSpeech diff --git a/.venv/Lib/site-packages/diagrams/gcp/network.py b/.venv/Lib/site-packages/diagrams/gcp/network.py new file mode 100644 index 00000000..1863b169 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/network.py @@ -0,0 +1,81 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GCP + + +class _Network(_GCP): + _type = "network" + _icon_dir = "resources/gcp/network" + + +class Armor(_Network): + _icon = "armor.png" + + +class CDN(_Network): + _icon = "cdn.png" + + +class DedicatedInterconnect(_Network): + _icon = "dedicated-interconnect.png" + + +class DNS(_Network): + _icon = "dns.png" + + +class ExternalIpAddresses(_Network): + _icon = "external-ip-addresses.png" + + +class FirewallRules(_Network): + _icon = "firewall-rules.png" + + +class LoadBalancing(_Network): + _icon = "load-balancing.png" + + +class NAT(_Network): + _icon = "nat.png" + + +class Network(_Network): + _icon = "network.png" + + +class PartnerInterconnect(_Network): + _icon = "partner-interconnect.png" + + +class PremiumNetworkTier(_Network): + _icon = "premium-network-tier.png" + + +class Router(_Network): + _icon = "router.png" + + +class Routes(_Network): + _icon = "routes.png" + + +class StandardNetworkTier(_Network): + _icon = "standard-network-tier.png" + + +class TrafficDirector(_Network): + _icon = "traffic-director.png" + + +class VirtualPrivateCloud(_Network): + _icon = "virtual-private-cloud.png" + + +class VPN(_Network): + _icon = "vpn.png" + + +# Aliases + +VPC = VirtualPrivateCloud diff --git a/.venv/Lib/site-packages/diagrams/gcp/operations.py b/.venv/Lib/site-packages/diagrams/gcp/operations.py new file mode 100644 index 00000000..8e7a2b4a --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/operations.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _GCP + + +class _Operations(_GCP): + _type = "operations" + _icon_dir = "resources/gcp/operations" + + +class Logging(_Operations): + _icon = "logging.png" + + +class Monitoring(_Operations): + _icon = "monitoring.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gcp/security.py b/.venv/Lib/site-packages/diagrams/gcp/security.py new file mode 100644 index 00000000..4b6ff5d3 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/security.py @@ -0,0 +1,38 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GCP + + +class _Security(_GCP): + _type = "security" + _icon_dir = "resources/gcp/security" + + +class Iam(_Security): + _icon = "iam.png" + + +class IAP(_Security): + _icon = "iap.png" + + +class KeyManagementService(_Security): + _icon = "key-management-service.png" + + +class ResourceManager(_Security): + _icon = "resource-manager.png" + + +class SecurityCommandCenter(_Security): + _icon = "security-command-center.png" + + +class SecurityScanner(_Security): + _icon = "security-scanner.png" + + +# Aliases + +KMS = KeyManagementService +SCC = SecurityCommandCenter diff --git a/.venv/Lib/site-packages/diagrams/gcp/storage.py b/.venv/Lib/site-packages/diagrams/gcp/storage.py new file mode 100644 index 00000000..d6549567 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gcp/storage.py @@ -0,0 +1,25 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GCP + + +class _Storage(_GCP): + _type = "storage" + _icon_dir = "resources/gcp/storage" + + +class Filestore(_Storage): + _icon = "filestore.png" + + +class PersistentDisk(_Storage): + _icon = "persistent-disk.png" + + +class Storage(_Storage): + _icon = "storage.png" + + +# Aliases + +GCS = Storage diff --git a/.venv/Lib/site-packages/diagrams/generic/__init__.py b/.venv/Lib/site-packages/diagrams/generic/__init__.py new file mode 100644 index 00000000..de86d4f5 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/__init__.py @@ -0,0 +1,16 @@ +""" +Generic provides the possibility of load an image to be presented as a node. +""" + +from diagrams import Node + + +class _Generic(Node): + provider = "generic" + _icon_dir = "resources/generic" + + fontcolor = "#ffffff" + + +class Generic(_Generic): + _icon = "generic.png" diff --git a/.venv/Lib/site-packages/diagrams/generic/blank.py b/.venv/Lib/site-packages/diagrams/generic/blank.py new file mode 100644 index 00000000..d50410b1 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/blank.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Generic + + +class _Blank(_Generic): + _type = "blank" + _icon_dir = "resources/generic/blank" + + +class Blank(_Blank): + _icon = "blank.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/generic/compute.py b/.venv/Lib/site-packages/diagrams/generic/compute.py new file mode 100644 index 00000000..436c641e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/compute.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
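Because the GCP modules above also define short aliases (`GKE`, `GCS`, `BigQuery`, `PubSub`), a compact event-driven sketch reads naturally; all service names are placeholders:

```python
from diagrams import Diagram
from diagrams.gcp.analytics import BigQuery, Dataflow, PubSub
from diagrams.gcp.compute import GKE
from diagrams.gcp.storage import GCS

# Services publish events that are transformed and landed in two sinks.
with Diagram("GCP sketch", show=False):
    GKE("services") >> PubSub("events") >> Dataflow("etl") >> [BigQuery("warehouse"), GCS("archive")]
```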
import _Generic + + +class _Compute(_Generic): + _type = "compute" + _icon_dir = "resources/generic/compute" + + +class Rack(_Compute): + _icon = "rack.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/generic/database.py b/.venv/Lib/site-packages/diagrams/generic/database.py new file mode 100644 index 00000000..eb54d4ee --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/database.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Generic + + +class _Database(_Generic): + _type = "database" + _icon_dir = "resources/generic/database" + + +class SQL(_Database): + _icon = "sql.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/generic/device.py b/.venv/Lib/site-packages/diagrams/generic/device.py new file mode 100644 index 00000000..29745b8e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/device.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Generic + + +class _Device(_Generic): + _type = "device" + _icon_dir = "resources/generic/device" + + +class Mobile(_Device): + _icon = "mobile.png" + + +class Tablet(_Device): + _icon = "tablet.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/generic/network.py b/.venv/Lib/site-packages/diagrams/generic/network.py new file mode 100644 index 00000000..40367ef0 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/network.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Generic + + +class _Network(_Generic): + _type = "network" + _icon_dir = "resources/generic/network" + + +class Firewall(_Network): + _icon = "firewall.png" + + +class Router(_Network): + _icon = "router.png" + + +class Subnet(_Network): + _icon = "subnet.png" + + +class Switch(_Network): + _icon = "switch.png" + + +class VPN(_Network): + _icon = "vpn.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/generic/os.py b/.venv/Lib/site-packages/diagrams/generic/os.py new file mode 100644 index 00000000..cf0e9804 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/os.py @@ -0,0 +1,51 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Generic + + +class _Os(_Generic): + _type = "os" + _icon_dir = "resources/generic/os" + + +class Android(_Os): + _icon = "android.png" + + +class Centos(_Os): + _icon = "centos.png" + + +class Debian(_Os): + _icon = "debian.png" + + +class IOS(_Os): + _icon = "ios.png" + + +class LinuxGeneral(_Os): + _icon = "linux-general.png" + + +class Raspbian(_Os): + _icon = "raspbian.png" + + +class RedHat(_Os): + _icon = "red-hat.png" + + +class Suse(_Os): + _icon = "suse.png" + + +class Ubuntu(_Os): + _icon = "ubuntu.png" + + +class Windows(_Os): + _icon = "windows.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/generic/place.py b/.venv/Lib/site-packages/diagrams/generic/place.py new file mode 100644 index 00000000..ec316069 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/place.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
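The generic nodes above cover the non-cloud parts of a diagram; one possible on-prem fragment, using an explicit `Edge` for a labelled link (labels are invented):

```python
from diagrams import Diagram, Edge
from diagrams.generic.compute import Rack
from diagrams.generic.network import Firewall, Router

with Diagram("On-prem sketch", show=False):
    Router("edge router") >> Edge(label="vlan 10") >> Firewall("fw") >> Rack("app rack")
```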
import _Generic + + +class _Place(_Generic): + _type = "place" + _icon_dir = "resources/generic/place" + + +class Datacenter(_Place): + _icon = "datacenter.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/generic/storage.py b/.venv/Lib/site-packages/diagrams/generic/storage.py new file mode 100644 index 00000000..9e58cefe --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/storage.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Generic + + +class _Storage(_Generic): + _type = "storage" + _icon_dir = "resources/generic/storage" + + +class Storage(_Storage): + _icon = "storage.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/generic/virtualization.py b/.venv/Lib/site-packages/diagrams/generic/virtualization.py new file mode 100644 index 00000000..f7f9006a --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/generic/virtualization.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Generic + + +class _Virtualization(_Generic): + _type = "virtualization" + _icon_dir = "resources/generic/virtualization" + + +class Qemu(_Virtualization): + _icon = "qemu.png" + + +class Virtualbox(_Virtualization): + _icon = "virtualbox.png" + + +class Vmware(_Virtualization): + _icon = "vmware.png" + + +class XEN(_Virtualization): + _icon = "xen.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/__init__.py b/.venv/Lib/site-packages/diagrams/gis/__init__.py new file mode 100644 index 00000000..684dca02 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/__init__.py @@ -0,0 +1,12 @@ +""" +GIS provides a set of services for Geographic Information Systems provider. +""" + +from diagrams import Node + + +class _GIS(Node): + _provider = "gis" + _icon_dir = "resources/gis" + + fontcolor = "#2d3436" diff --git a/.venv/Lib/site-packages/diagrams/gis/cli.py b/.venv/Lib/site-packages/diagrams/gis/cli.py new file mode 100644 index 00000000..4cf8e9f1 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/cli.py @@ -0,0 +1,35 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Cli(_GIS): + _type = "cli" + _icon_dir = "resources/gis/cli" + + +class Gdal(_Cli): + _icon = "gdal.png" + + +class Imposm(_Cli): + _icon = "imposm.png" + + +class Lastools(_Cli): + _icon = "lastools.png" + + +class Mapnik(_Cli): + _icon = "mapnik.png" + + +class Mdal(_Cli): + _icon = "mdal.png" + + +class Pdal(_Cli): + _icon = "pdal.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/cplusplus.py b/.venv/Lib/site-packages/diagrams/gis/cplusplus.py new file mode 100644 index 00000000..a772d45d --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/cplusplus.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Cplusplus(_GIS): + _type = "cplusplus" + _icon_dir = "resources/gis/cplusplus" + + +class Mapnik(_Cplusplus): + _icon = "mapnik.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/data.py b/.venv/Lib/site-packages/diagrams/gis/data.py new file mode 100644 index 00000000..4da2ff43 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/data.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _GIS + + +class _Data(_GIS): + _type = "data" + _icon_dir = "resources/gis/data" + + +class BAN(_Data): + _icon = "ban.png" + + +class Here(_Data): + _icon = "here.png" + + +class IGN(_Data): + _icon = "ign.png" + + +class Openstreetmap(_Data): + _icon = "openstreetmap.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/database.py b/.venv/Lib/site-packages/diagrams/gis/database.py new file mode 100644 index 00000000..04ee2f72 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/database.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Database(_GIS): + _type = "database" + _icon_dir = "resources/gis/database" + + +class Postgis(_Database): + _icon = "postgis.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/desktop.py b/.venv/Lib/site-packages/diagrams/gis/desktop.py new file mode 100644 index 00000000..b39ff222 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/desktop.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Desktop(_GIS): + _type = "desktop" + _icon_dir = "resources/gis/desktop" + + +class Maptunik(_Desktop): + _icon = "maptunik.png" + + +class QGIS(_Desktop): + _icon = "qgis.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/format.py b/.venv/Lib/site-packages/diagrams/gis/format.py new file mode 100644 index 00000000..3c932a1f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/format.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Format(_GIS): + _type = "format" + _icon_dir = "resources/gis/format" + + +class Geopackage(_Format): + _icon = "geopackage.png" + + +class Geoparquet(_Format): + _icon = "geoparquet.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/geocoding.py b/.venv/Lib/site-packages/diagrams/gis/geocoding.py new file mode 100644 index 00000000..de533277 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/geocoding.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Geocoding(_GIS): + _type = "geocoding" + _icon_dir = "resources/gis/geocoding" + + +class Addok(_Geocoding): + _icon = "addok.png" + + +class Gisgraphy(_Geocoding): + _icon = "gisgraphy.png" + + +class Nominatim(_Geocoding): + _icon = "nominatim.png" + + +class Pelias(_Geocoding): + _icon = "pelias.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/georchestra.py b/.venv/Lib/site-packages/diagrams/gis/georchestra.py new file mode 100644 index 00000000..fc1b8ebf --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/georchestra.py @@ -0,0 +1,11 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Georchestra(_GIS): + _type = "georchestra" + _icon_dir = "resources/gis/georchestra" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/java.py b/.venv/Lib/site-packages/diagrams/gis/java.py new file mode 100644 index 00000000..371b847f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/java.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _GIS + + +class _Java(_GIS): + _type = "java" + _icon_dir = "resources/gis/java" + + +class Geotools(_Java): + _icon = "geotools.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/javascript.py b/.venv/Lib/site-packages/diagrams/gis/javascript.py new file mode 100644 index 00000000..4a9c7391 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/javascript.py @@ -0,0 +1,43 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Javascript(_GIS): + _type = "javascript" + _icon_dir = "resources/gis/javascript" + + +class Cesium(_Javascript): + _icon = "cesium.png" + + +class Geostyler(_Javascript): + _icon = "geostyler.png" + + +class Keplerjs(_Javascript): + _icon = "keplerjs.png" + + +class Leaflet(_Javascript): + _icon = "leaflet.png" + + +class Maplibre(_Javascript): + _icon = "maplibre.png" + + +class OlExt(_Javascript): + _icon = "ol-ext.png" + + +class Openlayers(_Javascript): + _icon = "openlayers.png" + + +class Turfjs(_Javascript): + _icon = "turfjs.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/mobile.py b/.venv/Lib/site-packages/diagrams/gis/mobile.py new file mode 100644 index 00000000..b798eddd --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/mobile.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Mobile(_GIS): + _type = "mobile" + _icon_dir = "resources/gis/mobile" + + +class Mergin(_Mobile): + _icon = "mergin.png" + + +class Qfield(_Mobile): + _icon = "qfield.png" + + +class Smash(_Mobile): + _icon = "smash.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/ogc.py b/.venv/Lib/site-packages/diagrams/gis/ogc.py new file mode 100644 index 00000000..956df61e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/ogc.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _OGC(_GIS): + _type = "ogc" + _icon_dir = "resources/gis/ogc" + + +class OGC(_OGC): + _icon = "ogc.png" + + +class WFS(_OGC): + _icon = "wfs.png" + + +class WMS(_OGC): + _icon = "wms.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/organization.py b/.venv/Lib/site-packages/diagrams/gis/organization.py new file mode 100644 index 00000000..5fc49441 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/organization.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Organization(_GIS): + _type = "organization" + _icon_dir = "resources/gis/organization" + + +class Osgeo(_Organization): + _icon = "osgeo.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/python.py b/.venv/Lib/site-packages/diagrams/gis/python.py new file mode 100644 index 00000000..340139d8 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/python.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Python(_GIS): + _type = "python" + _icon_dir = "resources/gis/python" + + +class Geopandas(_Python): + _icon = "geopandas.png" + + +class Pysal(_Python): + _icon = "pysal.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/routing.py b/.venv/Lib/site-packages/diagrams/gis/routing.py new file mode 100644 index 00000000..2176df71 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/routing.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. 
+ +from . import _GIS + + +class _Routing(_GIS): + _type = "routing" + _icon_dir = "resources/gis/routing" + + +class Graphhopper(_Routing): + _icon = "graphhopper.png" + + +class Osrm(_Routing): + _icon = "osrm.png" + + +class Pgrouting(_Routing): + _icon = "pgrouting.png" + + +class Valhalla(_Routing): + _icon = "valhalla.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/server.py b/.venv/Lib/site-packages/diagrams/gis/server.py new file mode 100644 index 00000000..c6ac4106 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/server.py @@ -0,0 +1,99 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Server(_GIS): + _type = "server" + _icon_dir = "resources/gis/server" + + +class Actinia(_Server): + _icon = "actinia.png" + + +class Baremaps(_Server): + _icon = "baremaps.png" + + +class Deegree(_Server): + _icon = "deegree.png" + + +class G3WSuite(_Server): + _icon = "g3w-suite.png" + + +class Geohealthcheck(_Server): + _icon = "geohealthcheck.png" + + +class Geomapfish(_Server): + _icon = "geomapfish.png" + + +class Geomesa(_Server): + _icon = "geomesa.png" + + +class Geonetwork(_Server): + _icon = "geonetwork.png" + + +class Geonode(_Server): + _icon = "geonode.png" + + +class Georchestra(_Server): + _icon = "georchestra.png" + + +class Geoserver(_Server): + _icon = "geoserver.png" + + +class Geowebcache(_Server): + _icon = "geowebcache.png" + + +class Kepler(_Server): + _icon = "kepler.png" + + +class Mapproxy(_Server): + _icon = "mapproxy.png" + + +class Mapserver(_Server): + _icon = "mapserver.png" + + +class Mapstore(_Server): + _icon = "mapstore.png" + + +class Mviewer(_Server): + _icon = "mviewer.png" + + +class Pg_Tileserv(_Server): + _icon = "pg_tileserv.png" + + +class Pycsw(_Server): + _icon = "pycsw.png" + + +class Pygeoapi(_Server): + _icon = "pygeoapi.png" + + +class QGISServer(_Server): + _icon = "qgis-server.png" + + +class Zooproject(_Server): + _icon = "zooproject.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/gis/toolkit.py b/.venv/Lib/site-packages/diagrams/gis/toolkit.py new file mode 100644 index 00000000..d67d1d81 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/gis/toolkit.py @@ -0,0 +1,11 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _GIS + + +class _Toolkit(_GIS): + _type = "toolkit" + _icon_dir = "resources/gis/toolkit" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/__init__.py b/.venv/Lib/site-packages/diagrams/ibm/__init__.py new file mode 100644 index 00000000..31896d6e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/__init__.py @@ -0,0 +1,16 @@ +""" +IBM provides a set of services for IBM Cloud provider. +""" + +from diagrams import Node + + +class _IBM(Node): + _provider = "ibm" + _icon_dir = "resources/ibm" + + fontcolor = "#ffffff" + + +class IBM(_IBM): + _icon = "ibm.png" diff --git a/.venv/Lib/site-packages/diagrams/ibm/analytics.py b/.venv/Lib/site-packages/diagrams/ibm/analytics.py new file mode 100644 index 00000000..dc641a43 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/analytics.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _IBM + + +class _Analytics(_IBM): + _type = "analytics" + _icon_dir = "resources/ibm/analytics" + + +class Analytics(_Analytics): + _icon = "analytics.png" + + +class DataIntegration(_Analytics): + _icon = "data-integration.png" + + +class DataRepositories(_Analytics): + _icon = "data-repositories.png" + + +class DeviceAnalytics(_Analytics): + _icon = "device-analytics.png" + + +class StreamingComputing(_Analytics): + _icon = "streaming-computing.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/applications.py b/.venv/Lib/site-packages/diagrams/ibm/applications.py new file mode 100644 index 00000000..c9e51713 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/applications.py @@ -0,0 +1,87 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _IBM + + +class _Applications(_IBM): + _type = "applications" + _icon_dir = "resources/ibm/applications" + + +class ActionableInsight(_Applications): + _icon = "actionable-insight.png" + + +class Annotate(_Applications): + _icon = "annotate.png" + + +class ApiDeveloperPortal(_Applications): + _icon = "api-developer-portal.png" + + +class ApiPolyglotRuntimes(_Applications): + _icon = "api-polyglot-runtimes.png" + + +class AppServer(_Applications): + _icon = "app-server.png" + + +class ApplicationLogic(_Applications): + _icon = "application-logic.png" + + +class EnterpriseApplications(_Applications): + _icon = "enterprise-applications.png" + + +class Index(_Applications): + _icon = "index.png" + + +class IotApplication(_Applications): + _icon = "iot-application.png" + + +class Microservice(_Applications): + _icon = "microservice.png" + + +class MobileApp(_Applications): + _icon = "mobile-app.png" + + +class Ontology(_Applications): + _icon = "ontology.png" + + +class OpenSourceTools(_Applications): + _icon = "open-source-tools.png" + + +class RuntimeServices(_Applications): + _icon = "runtime-services.png" + + +class SaasApplications(_Applications): + _icon = "saas-applications.png" + + +class ServiceBroker(_Applications): + _icon = "service-broker.png" + + +class SpeechToText(_Applications): + _icon = "speech-to-text.png" + + +class VisualRecognition(_Applications): + _icon = "visual-recognition.png" + + +class Visualization(_Applications): + _icon = "visualization.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/blockchain.py b/.venv/Lib/site-packages/diagrams/ibm/blockchain.py new file mode 100644 index 00000000..3c5b5a01 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/blockchain.py @@ -0,0 +1,91 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _IBM + + +class _Blockchain(_IBM): + _type = "blockchain" + _icon_dir = "resources/ibm/blockchain" + + +class BlockchainDeveloper(_Blockchain): + _icon = "blockchain-developer.png" + + +class Blockchain(_Blockchain): + _icon = "blockchain.png" + + +class CertificateAuthority(_Blockchain): + _icon = "certificate-authority.png" + + +class ClientApplication(_Blockchain): + _icon = "client-application.png" + + +class Communication(_Blockchain): + _icon = "communication.png" + + +class Consensus(_Blockchain): + _icon = "consensus.png" + + +class EventListener(_Blockchain): + _icon = "event-listener.png" + + +class Event(_Blockchain): + _icon = "event.png" + + +class ExistingEnterpriseSystems(_Blockchain): + _icon = "existing-enterprise-systems.png" + + +class HyperledgerFabric(_Blockchain): + _icon = "hyperledger-fabric.png" + + +class KeyManagement(_Blockchain): + _icon = "key-management.png" + + +class Ledger(_Blockchain): + _icon = "ledger.png" + + +class MembershipServicesProviderApi(_Blockchain): + _icon = "membership-services-provider-api.png" + + +class Membership(_Blockchain): + _icon = "membership.png" + + +class MessageBus(_Blockchain): + _icon = "message-bus.png" + + +class Node(_Blockchain): + _icon = "node.png" + + +class Services(_Blockchain): + _icon = "services.png" + + +class SmartContract(_Blockchain): + _icon = "smart-contract.png" + + +class TransactionManager(_Blockchain): + _icon = "transaction-manager.png" + + +class Wallet(_Blockchain): + _icon = "wallet.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/compute.py b/.venv/Lib/site-packages/diagrams/ibm/compute.py new file mode 100644 index 00000000..7bff4f0d --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/compute.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _IBM + + +class _Compute(_IBM): + _type = "compute" + _icon_dir = "resources/ibm/compute" + + +class BareMetalServer(_Compute): + _icon = "bare-metal-server.png" + + +class ImageService(_Compute): + _icon = "image-service.png" + + +class Instance(_Compute): + _icon = "instance.png" + + +class Key(_Compute): + _icon = "key.png" + + +class PowerInstance(_Compute): + _icon = "power-instance.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/data.py b/.venv/Lib/site-packages/diagrams/ibm/data.py new file mode 100644 index 00000000..2434cd51 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/data.py @@ -0,0 +1,63 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _IBM + + +class _Data(_IBM): + _type = "data" + _icon_dir = "resources/ibm/data" + + +class Caches(_Data): + _icon = "caches.png" + + +class Cloud(_Data): + _icon = "cloud.png" + + +class ConversationTrainedDeployed(_Data): + _icon = "conversation-trained-deployed.png" + + +class DataServices(_Data): + _icon = "data-services.png" + + +class DataSources(_Data): + _icon = "data-sources.png" + + +class DeviceIdentityService(_Data): + _icon = "device-identity-service.png" + + +class DeviceRegistry(_Data): + _icon = "device-registry.png" + + +class EnterpriseData(_Data): + _icon = "enterprise-data.png" + + +class EnterpriseUserDirectory(_Data): + _icon = "enterprise-user-directory.png" + + +class FileRepository(_Data): + _icon = "file-repository.png" + + +class GroundTruth(_Data): + _icon = "ground-truth.png" + + +class Model(_Data): + _icon = "model.png" + + +class TmsDataInterface(_Data): + _icon = "tms-data-interface.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/devops.py b/.venv/Lib/site-packages/diagrams/ibm/devops.py new file mode 100644 index 00000000..fd41d700 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/devops.py @@ -0,0 +1,51 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _IBM + + +class _Devops(_IBM): + _type = "devops" + _icon_dir = "resources/ibm/devops" + + +class ArtifactManagement(_Devops): + _icon = "artifact-management.png" + + +class BuildTest(_Devops): + _icon = "build-test.png" + + +class CodeEditor(_Devops): + _icon = "code-editor.png" + + +class CollaborativeDevelopment(_Devops): + _icon = "collaborative-development.png" + + +class ConfigurationManagement(_Devops): + _icon = "configuration-management.png" + + +class ContinuousDeploy(_Devops): + _icon = "continuous-deploy.png" + + +class ContinuousTesting(_Devops): + _icon = "continuous-testing.png" + + +class Devops(_Devops): + _icon = "devops.png" + + +class Provision(_Devops): + _icon = "provision.png" + + +class ReleaseManagement(_Devops): + _icon = "release-management.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/general.py b/.venv/Lib/site-packages/diagrams/ibm/general.py new file mode 100644 index 00000000..5b490070 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/general.py @@ -0,0 +1,119 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _IBM + + +class _General(_IBM): + _type = "general" + _icon_dir = "resources/ibm/general" + + +class CloudMessaging(_General): + _icon = "cloud-messaging.png" + + +class CloudServices(_General): + _icon = "cloud-services.png" + + +class Cloudant(_General): + _icon = "cloudant.png" + + +class CognitiveServices(_General): + _icon = "cognitive-services.png" + + +class DataSecurity(_General): + _icon = "data-security.png" + + +class Enterprise(_General): + _icon = "enterprise.png" + + +class GovernanceRiskCompliance(_General): + _icon = "governance-risk-compliance.png" + + +class IBMContainers(_General): + _icon = "ibm-containers.png" + + +class IBMPublicCloud(_General): + _icon = "ibm-public-cloud.png" + + +class IdentityAccessManagement(_General): + _icon = "identity-access-management.png" + + +class IdentityProvider(_General): + _icon = "identity-provider.png" + + +class InfrastructureSecurity(_General): + _icon = "infrastructure-security.png" + + +class Internet(_General): + _icon = "internet.png" + + +class IotCloud(_General): + _icon = "iot-cloud.png" + + +class MicroservicesApplication(_General): + _icon = "microservices-application.png" + + +class MicroservicesMesh(_General): + _icon = "microservices-mesh.png" + + +class MonitoringLogging(_General): + _icon = "monitoring-logging.png" + + +class Monitoring(_General): + _icon = "monitoring.png" + + +class ObjectStorage(_General): + _icon = "object-storage.png" + + +class OfflineCapabilities(_General): + _icon = "offline-capabilities.png" + + +class Openwhisk(_General): + _icon = "openwhisk.png" + + +class PeerCloud(_General): + _icon = "peer-cloud.png" + + +class RetrieveRank(_General): + _icon = "retrieve-rank.png" + + +class Scalable(_General): + _icon = "scalable.png" + + +class ServiceDiscoveryConfiguration(_General): + _icon = "service-discovery-configuration.png" + + +class TextToSpeech(_General): + _icon = "text-to-speech.png" + + +class TransformationConnectivity(_General): + _icon = "transformation-connectivity.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/infrastructure.py b/.venv/Lib/site-packages/diagrams/ibm/infrastructure.py new file mode 100644 index 00000000..48a3c042 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/infrastructure.py @@ -0,0 +1,83 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _IBM + + +class _Infrastructure(_IBM): + _type = "infrastructure" + _icon_dir = "resources/ibm/infrastructure" + + +class Channels(_Infrastructure): + _icon = "channels.png" + + +class CloudMessaging(_Infrastructure): + _icon = "cloud-messaging.png" + + +class Dashboard(_Infrastructure): + _icon = "dashboard.png" + + +class Diagnostics(_Infrastructure): + _icon = "diagnostics.png" + + +class EdgeServices(_Infrastructure): + _icon = "edge-services.png" + + +class EnterpriseMessaging(_Infrastructure): + _icon = "enterprise-messaging.png" + + +class EventFeed(_Infrastructure): + _icon = "event-feed.png" + + +class InfrastructureServices(_Infrastructure): + _icon = "infrastructure-services.png" + + +class InterserviceCommunication(_Infrastructure): + _icon = "interservice-communication.png" + + +class LoadBalancingRouting(_Infrastructure): + _icon = "load-balancing-routing.png" + + +class MicroservicesMesh(_Infrastructure): + _icon = "microservices-mesh.png" + + +class MobileBackend(_Infrastructure): + _icon = "mobile-backend.png" + + +class MobileProviderNetwork(_Infrastructure): + _icon = "mobile-provider-network.png" + + +class MonitoringLogging(_Infrastructure): + _icon = "monitoring-logging.png" + + +class Monitoring(_Infrastructure): + _icon = "monitoring.png" + + +class PeerServices(_Infrastructure): + _icon = "peer-services.png" + + +class ServiceDiscoveryConfiguration(_Infrastructure): + _icon = "service-discovery-configuration.png" + + +class TransformationConnectivity(_Infrastructure): + _icon = "transformation-connectivity.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/management.py b/.venv/Lib/site-packages/diagrams/ibm/management.py new file mode 100644 index 00000000..8ec9ac7c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/management.py @@ -0,0 +1,71 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _IBM + + +class _Management(_IBM): + _type = "management" + _icon_dir = "resources/ibm/management" + + +class AlertNotification(_Management): + _icon = "alert-notification.png" + + +class ApiManagement(_Management): + _icon = "api-management.png" + + +class CloudManagement(_Management): + _icon = "cloud-management.png" + + +class ClusterManagement(_Management): + _icon = "cluster-management.png" + + +class ContentManagement(_Management): + _icon = "content-management.png" + + +class DataServices(_Management): + _icon = "data-services.png" + + +class DeviceManagement(_Management): + _icon = "device-management.png" + + +class InformationGovernance(_Management): + _icon = "information-governance.png" + + +class ItServiceManagement(_Management): + _icon = "it-service-management.png" + + +class Management(_Management): + _icon = "management.png" + + +class MonitoringMetrics(_Management): + _icon = "monitoring-metrics.png" + + +class ProcessManagement(_Management): + _icon = "process-management.png" + + +class ProviderCloudPortalService(_Management): + _icon = "provider-cloud-portal-service.png" + + +class PushNotifications(_Management): + _icon = "push-notifications.png" + + +class ServiceManagementTools(_Management): + _icon = "service-management-tools.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/network.py b/.venv/Lib/site-packages/diagrams/ibm/network.py new file mode 100644 index 00000000..605319d1 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/network.py @@ -0,0 +1,95 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _IBM + + +class _Network(_IBM): + _type = "network" + _icon_dir = "resources/ibm/network" + + +class Bridge(_Network): + _icon = "bridge.png" + + +class DirectLink(_Network): + _icon = "direct-link.png" + + +class Enterprise(_Network): + _icon = "enterprise.png" + + +class Firewall(_Network): + _icon = "firewall.png" + + +class FloatingIp(_Network): + _icon = "floating-ip.png" + + +class Gateway(_Network): + _icon = "gateway.png" + + +class InternetServices(_Network): + _icon = "internet-services.png" + + +class LoadBalancerListener(_Network): + _icon = "load-balancer-listener.png" + + +class LoadBalancerPool(_Network): + _icon = "load-balancer-pool.png" + + +class LoadBalancer(_Network): + _icon = "load-balancer.png" + + +class LoadBalancingRouting(_Network): + _icon = "load-balancing-routing.png" + + +class PublicGateway(_Network): + _icon = "public-gateway.png" + + +class Region(_Network): + _icon = "region.png" + + +class Router(_Network): + _icon = "router.png" + + +class Rules(_Network): + _icon = "rules.png" + + +class Subnet(_Network): + _icon = "subnet.png" + + +class TransitGateway(_Network): + _icon = "transit-gateway.png" + + +class Vpc(_Network): + _icon = "vpc.png" + + +class VpnConnection(_Network): + _icon = "vpn-connection.png" + + +class VpnGateway(_Network): + _icon = "vpn-gateway.png" + + +class VpnPolicy(_Network): + _icon = "vpn-policy.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/security.py b/.venv/Lib/site-packages/diagrams/ibm/security.py new file mode 100644 index 00000000..ba35efd5 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/security.py @@ -0,0 +1,67 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _IBM + + +class _Security(_IBM): + _type = "security" + _icon_dir = "resources/ibm/security" + + +class ApiSecurity(_Security): + _icon = "api-security.png" + + +class BlockchainSecurityService(_Security): + _icon = "blockchain-security-service.png" + + +class DataSecurity(_Security): + _icon = "data-security.png" + + +class Firewall(_Security): + _icon = "firewall.png" + + +class Gateway(_Security): + _icon = "gateway.png" + + +class GovernanceRiskCompliance(_Security): + _icon = "governance-risk-compliance.png" + + +class IdentityAccessManagement(_Security): + _icon = "identity-access-management.png" + + +class IdentityProvider(_Security): + _icon = "identity-provider.png" + + +class InfrastructureSecurity(_Security): + _icon = "infrastructure-security.png" + + +class PhysicalSecurity(_Security): + _icon = "physical-security.png" + + +class SecurityMonitoringIntelligence(_Security): + _icon = "security-monitoring-intelligence.png" + + +class SecurityServices(_Security): + _icon = "security-services.png" + + +class TrustendComputing(_Security): + _icon = "trustend-computing.png" + + +class Vpn(_Security): + _icon = "vpn.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/social.py b/.venv/Lib/site-packages/diagrams/ibm/social.py new file mode 100644 index 00000000..66fd7c4a --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/social.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _IBM + + +class _Social(_IBM): + _type = "social" + _icon_dir = "resources/ibm/social" + + +class Communities(_Social): + _icon = "communities.png" + + +class FileSync(_Social): + _icon = "file-sync.png" + + +class LiveCollaboration(_Social): + _icon = "live-collaboration.png" + + +class Messaging(_Social): + _icon = "messaging.png" + + +class Networking(_Social): + _icon = "networking.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/storage.py b/.venv/Lib/site-packages/diagrams/ibm/storage.py new file mode 100644 index 00000000..b0f7e2a7 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/storage.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _IBM + + +class _Storage(_IBM): + _type = "storage" + _icon_dir = "resources/ibm/storage" + + +class BlockStorage(_Storage): + _icon = "block-storage.png" + + +class ObjectStorage(_Storage): + _icon = "object-storage.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/ibm/user.py b/.venv/Lib/site-packages/diagrams/ibm/user.py new file mode 100644 index 00000000..88484706 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/ibm/user.py @@ -0,0 +1,35 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _IBM + + +class _User(_IBM): + _type = "user" + _icon_dir = "resources/ibm/user" + + +class Browser(_User): + _icon = "browser.png" + + +class Device(_User): + _icon = "device.png" + + +class IntegratedDigitalExperiences(_User): + _icon = "integrated-digital-experiences.png" + + +class PhysicalEntity(_User): + _icon = "physical-entity.png" + + +class Sensor(_User): + _icon = "sensor.png" + + +class User(_User): + _icon = "user.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/k8s/__init__.py b/.venv/Lib/site-packages/diagrams/k8s/__init__.py new file mode 100644 index 00000000..871bd2d3 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/__init__.py @@ -0,0 +1,16 @@ +""" +K8S provides a set of services for Kubernetes. +""" + +from diagrams import Node + + +class _K8S(Node): + _provider = "k8s" + _icon_dir = "resources/k8s" + + fontcolor = "#2d3436" + + +class K8S(_K8S): + _icon = "k8s.png" diff --git a/.venv/Lib/site-packages/diagrams/k8s/chaos.py b/.venv/Lib/site-packages/diagrams/k8s/chaos.py new file mode 100644 index 00000000..9f596772 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/chaos.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _K8S + + +class _Chaos(_K8S): + _type = "chaos" + _icon_dir = "resources/k8s/chaos" + + +class ChaosMesh(_Chaos): + _icon = "chaos-mesh.png" + + +class LitmusChaos(_Chaos): + _icon = "litmus-chaos.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/k8s/clusterconfig.py b/.venv/Lib/site-packages/diagrams/k8s/clusterconfig.py new file mode 100644 index 00000000..5a6e3922 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/clusterconfig.py @@ -0,0 +1,26 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _K8S + + +class _Clusterconfig(_K8S): + _type = "clusterconfig" + _icon_dir = "resources/k8s/clusterconfig" + + +class HPA(_Clusterconfig): + _icon = "hpa.png" + + +class Limits(_Clusterconfig): + _icon = "limits.png" + + +class Quota(_Clusterconfig): + _icon = "quota.png" + + +# Aliases + +LimitRange = Limits +HorizontalPodAutoscaler = HPA diff --git a/.venv/Lib/site-packages/diagrams/k8s/compute.py b/.venv/Lib/site-packages/diagrams/k8s/compute.py new file mode 100644 index 00000000..a8078b27 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/compute.py @@ -0,0 +1,44 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _K8S + + +class _Compute(_K8S): + _type = "compute" + _icon_dir = "resources/k8s/compute" + + +class Cronjob(_Compute): + _icon = "cronjob.png" + + +class Deploy(_Compute): + _icon = "deploy.png" + + +class DS(_Compute): + _icon = "ds.png" + + +class Job(_Compute): + _icon = "job.png" + + +class Pod(_Compute): + _icon = "pod.png" + + +class RS(_Compute): + _icon = "rs.png" + + +class STS(_Compute): + _icon = "sts.png" + + +# Aliases + +Deployment = Deploy +DaemonSet = DS +ReplicaSet = RS +StatefulSet = STS diff --git a/.venv/Lib/site-packages/diagrams/k8s/controlplane.py b/.venv/Lib/site-packages/diagrams/k8s/controlplane.py new file mode 100644 index 00000000..8fa11da9 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/controlplane.py @@ -0,0 +1,40 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _K8S + + +class _Controlplane(_K8S): + _type = "controlplane" + _icon_dir = "resources/k8s/controlplane" + + +class API(_Controlplane): + _icon = "api.png" + + +class CCM(_Controlplane): + _icon = "c-c-m.png" + + +class CM(_Controlplane): + _icon = "c-m.png" + + +class KProxy(_Controlplane): + _icon = "k-proxy.png" + + +class Kubelet(_Controlplane): + _icon = "kubelet.png" + + +class Sched(_Controlplane): + _icon = "sched.png" + + +# Aliases + +APIServer = API +ControllerManager = CM +KubeProxy = KProxy +Scheduler = Sched diff --git a/.venv/Lib/site-packages/diagrams/k8s/ecosystem.py b/.venv/Lib/site-packages/diagrams/k8s/ecosystem.py new file mode 100644 index 00000000..41c84734 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/ecosystem.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _K8S + + +class _Ecosystem(_K8S): + _type = "ecosystem" + _icon_dir = "resources/k8s/ecosystem" + + +class ExternalDns(_Ecosystem): + _icon = "external-dns.png" + + +class Helm(_Ecosystem): + _icon = "helm.png" + + +class Krew(_Ecosystem): + _icon = "krew.png" + + +class Kustomize(_Ecosystem): + _icon = "kustomize.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/k8s/group.py b/.venv/Lib/site-packages/diagrams/k8s/group.py new file mode 100644 index 00000000..cb0072ad --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/group.py @@ -0,0 +1,17 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _K8S + + +class _Group(_K8S): + _type = "group" + _icon_dir = "resources/k8s/group" + + +class NS(_Group): + _icon = "ns.png" + + +# Aliases + +Namespace = NS diff --git a/.venv/Lib/site-packages/diagrams/k8s/infra.py b/.venv/Lib/site-packages/diagrams/k8s/infra.py new file mode 100644 index 00000000..aa8fed83 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/infra.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _K8S + + +class _Infra(_K8S): + _type = "infra" + _icon_dir = "resources/k8s/infra" + + +class ETCD(_Infra): + _icon = "etcd.png" + + +class Master(_Infra): + _icon = "master.png" + + +class Node(_Infra): + _icon = "node.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/k8s/network.py b/.venv/Lib/site-packages/diagrams/k8s/network.py new file mode 100644 index 00000000..7436f549 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/network.py @@ -0,0 +1,32 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _K8S + + +class _Network(_K8S): + _type = "network" + _icon_dir = "resources/k8s/network" + + +class Ep(_Network): + _icon = "ep.png" + + +class Ing(_Network): + _icon = "ing.png" + + +class Netpol(_Network): + _icon = "netpol.png" + + +class SVC(_Network): + _icon = "svc.png" + + +# Aliases + +Endpoint = Ep +Ingress = Ing +NetworkPolicy = Netpol +Service = SVC diff --git a/.venv/Lib/site-packages/diagrams/k8s/others.py b/.venv/Lib/site-packages/diagrams/k8s/others.py new file mode 100644 index 00000000..9a3e9c0f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/others.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _K8S + + +class _Others(_K8S): + _type = "others" + _icon_dir = "resources/k8s/others" + + +class CRD(_Others): + _icon = "crd.png" + + +class PSP(_Others): + _icon = "psp.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/k8s/podconfig.py b/.venv/Lib/site-packages/diagrams/k8s/podconfig.py new file mode 100644 index 00000000..4e50c209 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/podconfig.py @@ -0,0 +1,21 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _K8S + + +class _Podconfig(_K8S): + _type = "podconfig" + _icon_dir = "resources/k8s/podconfig" + + +class CM(_Podconfig): + _icon = "cm.png" + + +class Secret(_Podconfig): + _icon = "secret.png" + + +# Aliases + +ConfigMap = CM diff --git a/.venv/Lib/site-packages/diagrams/k8s/rbac.py b/.venv/Lib/site-packages/diagrams/k8s/rbac.py new file mode 100644 index 00000000..4d17f84c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/rbac.py @@ -0,0 +1,44 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _K8S + + +class _Rbac(_K8S): + _type = "rbac" + _icon_dir = "resources/k8s/rbac" + + +class CRole(_Rbac): + _icon = "c-role.png" + + +class CRB(_Rbac): + _icon = "crb.png" + + +class Group(_Rbac): + _icon = "group.png" + + +class RB(_Rbac): + _icon = "rb.png" + + +class Role(_Rbac): + _icon = "role.png" + + +class SA(_Rbac): + _icon = "sa.png" + + +class User(_Rbac): + _icon = "user.png" + + +# Aliases + +ClusterRole = CRole +ClusterRoleBinding = CRB +RoleBinding = RB +ServiceAccount = SA diff --git a/.venv/Lib/site-packages/diagrams/k8s/storage.py b/.venv/Lib/site-packages/diagrams/k8s/storage.py new file mode 100644 index 00000000..1b166e64 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/k8s/storage.py @@ -0,0 +1,32 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _K8S + + +class _Storage(_K8S): + _type = "storage" + _icon_dir = "resources/k8s/storage" + + +class PV(_Storage): + _icon = "pv.png" + + +class PVC(_Storage): + _icon = "pvc.png" + + +class SC(_Storage): + _icon = "sc.png" + + +class Vol(_Storage): + _icon = "vol.png" + + +# Aliases + +PersistentVolume = PV +PersistentVolumeClaim = PVC +StorageClass = SC +Volume = Vol diff --git a/.venv/Lib/site-packages/diagrams/oci/__init__.py b/.venv/Lib/site-packages/diagrams/oci/__init__.py new file mode 100644 index 00000000..8dacc920 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/__init__.py @@ -0,0 +1,16 @@ +""" +OCI provides a set of services for Oracle Cloud Infrastructure provider. +""" + +from diagrams import Node + + +class _OCI(Node): + _provider = "oci" + _icon_dir = "resources/oci" + + fontcolor = "#312D2A" + + +class OCI(_OCI): + _icon = "oci.png" diff --git a/.venv/Lib/site-packages/diagrams/oci/compute.py b/.venv/Lib/site-packages/diagrams/oci/compute.py new file mode 100644 index 00000000..ed268d8c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/compute.py @@ -0,0 +1,84 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OCI + + +class _Compute(_OCI): + _type = "compute" + _icon_dir = "resources/oci/compute" + + +class AutoscaleWhite(_Compute): + _icon = "autoscale-white.png" + + +class Autoscale(_Compute): + _icon = "autoscale.png" + + +class BMWhite(_Compute): + _icon = "bm-white.png" + + +class BM(_Compute): + _icon = "bm.png" + + +class ContainerWhite(_Compute): + _icon = "container-white.png" + + +class Container(_Compute): + _icon = "container.png" + + +class FunctionsWhite(_Compute): + _icon = "functions-white.png" + + +class Functions(_Compute): + _icon = "functions.png" + + +class InstancePoolsWhite(_Compute): + _icon = "instance-pools-white.png" + + +class InstancePools(_Compute): + _icon = "instance-pools.png" + + +class OCIRWhite(_Compute): + _icon = "ocir-white.png" + + +class OCIR(_Compute): + _icon = "ocir.png" + + +class OKEWhite(_Compute): + _icon = "oke-white.png" + + +class OKE(_Compute): + _icon = "oke.png" + + +class VMWhite(_Compute): + _icon = "vm-white.png" + + +class VM(_Compute): + _icon = "vm.png" + + +# Aliases + +VirtualMachine = VM +VirtualMachineWhite = VMWhite +BareMetal = BM +BareMetalWhite = BMWhite +OCIRegistry = OCIR +OCIRegistryWhite = OCIRWhite +ContainerEngine = OKE +ContainerEngineWhite = OKEWhite diff --git a/.venv/Lib/site-packages/diagrams/oci/connectivity.py b/.venv/Lib/site-packages/diagrams/oci/connectivity.py new file mode 100644 index 00000000..658ac0a7 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/connectivity.py @@ -0,0 +1,83 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OCI + + +class _Connectivity(_OCI): + _type = "connectivity" + _icon_dir = "resources/oci/connectivity" + + +class BackboneWhite(_Connectivity): + _icon = "backbone-white.png" + + +class Backbone(_Connectivity): + _icon = "backbone.png" + + +class CDNWhite(_Connectivity): + _icon = "cdn-white.png" + + +class CDN(_Connectivity): + _icon = "cdn.png" + + +class CustomerDatacenter(_Connectivity): + _icon = "customer-datacenter.png" + + +class CustomerDatacntrWhite(_Connectivity): + _icon = "customer-datacntr-white.png" + + +class CustomerPremisesWhite(_Connectivity): + _icon = "customer-premises-white.png" + + +class CustomerPremises(_Connectivity): + _icon = "customer-premises.png" + + +class DisconnectedRegionsWhite(_Connectivity): + _icon = "disconnected-regions-white.png" + + +class DisconnectedRegions(_Connectivity): + _icon = "disconnected-regions.png" + + +class DNSWhite(_Connectivity): + _icon = "dns-white.png" + + +class DNS(_Connectivity): + _icon = "dns.png" + + +class FastConnectWhite(_Connectivity): + _icon = "fast-connect-white.png" + + +class FastConnect(_Connectivity): + _icon = "fast-connect.png" + + +class NATGatewayWhite(_Connectivity): + _icon = "nat-gateway-white.png" + + +class NATGateway(_Connectivity): + _icon = "nat-gateway.png" + + +class VPNWhite(_Connectivity): + _icon = "vpn-white.png" + + +class VPN(_Connectivity): + _icon = "vpn.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/oci/database.py b/.venv/Lib/site-packages/diagrams/oci/database.py new file mode 100644 index 00000000..84048342 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/database.py @@ -0,0 +1,88 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OCI + + +class _Database(_OCI): + _type = "database" + _icon_dir = "resources/oci/database" + + +class AutonomousWhite(_Database): + _icon = "autonomous-white.png" + + +class Autonomous(_Database): + _icon = "autonomous.png" + + +class BigdataServiceWhite(_Database): + _icon = "bigdata-service-white.png" + + +class BigdataService(_Database): + _icon = "bigdata-service.png" + + +class DatabaseServiceWhite(_Database): + _icon = "database-service-white.png" + + +class DatabaseService(_Database): + _icon = "database-service.png" + + +class DataflowApacheWhite(_Database): + _icon = "dataflow-apache-white.png" + + +class DataflowApache(_Database): + _icon = "dataflow-apache.png" + + +class DcatWhite(_Database): + _icon = "dcat-white.png" + + +class Dcat(_Database): + _icon = "dcat.png" + + +class DisWhite(_Database): + _icon = "dis-white.png" + + +class Dis(_Database): + _icon = "dis.png" + + +class DMSWhite(_Database): + _icon = "dms-white.png" + + +class DMS(_Database): + _icon = "dms.png" + + +class ScienceWhite(_Database): + _icon = "science-white.png" + + +class Science(_Database): + _icon = "science.png" + + +class StreamWhite(_Database): + _icon = "stream-white.png" + + +class Stream(_Database): + _icon = "stream.png" + + +# Aliases + +ADB = Autonomous +ADBWhite = AutonomousWhite +DBService = DatabaseService +DBServiceWhite = DatabaseServiceWhite diff --git a/.venv/Lib/site-packages/diagrams/oci/devops.py b/.venv/Lib/site-packages/diagrams/oci/devops.py new file mode 100644 index 00000000..7089868e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/devops.py @@ -0,0 +1,35 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OCI + + +class _Devops(_OCI): + _type = "devops" + _icon_dir = "resources/oci/devops" + + +class APIGatewayWhite(_Devops): + _icon = "api-gateway-white.png" + + +class APIGateway(_Devops): + _icon = "api-gateway.png" + + +class APIServiceWhite(_Devops): + _icon = "api-service-white.png" + + +class APIService(_Devops): + _icon = "api-service.png" + + +class ResourceMgmtWhite(_Devops): + _icon = "resource-mgmt-white.png" + + +class ResourceMgmt(_Devops): + _icon = "resource-mgmt.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/oci/governance.py b/.venv/Lib/site-packages/diagrams/oci/governance.py new file mode 100644 index 00000000..914919af --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/governance.py @@ -0,0 +1,67 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OCI + + +class _Governance(_OCI): + _type = "governance" + _icon_dir = "resources/oci/governance" + + +class AuditWhite(_Governance): + _icon = "audit-white.png" + + +class Audit(_Governance): + _icon = "audit.png" + + +class CompartmentsWhite(_Governance): + _icon = "compartments-white.png" + + +class Compartments(_Governance): + _icon = "compartments.png" + + +class GroupsWhite(_Governance): + _icon = "groups-white.png" + + +class Groups(_Governance): + _icon = "groups.png" + + +class LoggingWhite(_Governance): + _icon = "logging-white.png" + + +class Logging(_Governance): + _icon = "logging.png" + + +class OCIDWhite(_Governance): + _icon = "ocid-white.png" + + +class OCID(_Governance): + _icon = "ocid.png" + + +class PoliciesWhite(_Governance): + _icon = "policies-white.png" + + +class Policies(_Governance): + _icon = "policies.png" + + +class TaggingWhite(_Governance): + _icon = "tagging-white.png" + + +class Tagging(_Governance): + _icon = "tagging.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/oci/monitoring.py b/.venv/Lib/site-packages/diagrams/oci/monitoring.py new file mode 100644 index 00000000..ae05bbaa --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/monitoring.py @@ -0,0 +1,83 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OCI + + +class _Monitoring(_OCI): + _type = "monitoring" + _icon_dir = "resources/oci/monitoring" + + +class AlarmWhite(_Monitoring): + _icon = "alarm-white.png" + + +class Alarm(_Monitoring): + _icon = "alarm.png" + + +class EmailWhite(_Monitoring): + _icon = "email-white.png" + + +class Email(_Monitoring): + _icon = "email.png" + + +class EventsWhite(_Monitoring): + _icon = "events-white.png" + + +class Events(_Monitoring): + _icon = "events.png" + + +class HealthCheckWhite(_Monitoring): + _icon = "health-check-white.png" + + +class HealthCheck(_Monitoring): + _icon = "health-check.png" + + +class NotificationsWhite(_Monitoring): + _icon = "notifications-white.png" + + +class Notifications(_Monitoring): + _icon = "notifications.png" + + +class QueueWhite(_Monitoring): + _icon = "queue-white.png" + + +class Queue(_Monitoring): + _icon = "queue.png" + + +class SearchWhite(_Monitoring): + _icon = "search-white.png" + + +class Search(_Monitoring): + _icon = "search.png" + + +class TelemetryWhite(_Monitoring): + _icon = "telemetry-white.png" + + +class Telemetry(_Monitoring): + _icon = "telemetry.png" + + +class WorkflowWhite(_Monitoring): + _icon = "workflow-white.png" + + +class Workflow(_Monitoring): + _icon = "workflow.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/oci/network.py b/.venv/Lib/site-packages/diagrams/oci/network.py new file mode 100644 index 00000000..6175cb6f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/network.py @@ -0,0 +1,75 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OCI + + +class _Network(_OCI): + _type = "network" + _icon_dir = "resources/oci/network" + + +class DrgWhite(_Network): + _icon = "drg-white.png" + + +class Drg(_Network): + _icon = "drg.png" + + +class FirewallWhite(_Network): + _icon = "firewall-white.png" + + +class Firewall(_Network): + _icon = "firewall.png" + + +class InternetGatewayWhite(_Network): + _icon = "internet-gateway-white.png" + + +class InternetGateway(_Network): + _icon = "internet-gateway.png" + + +class LoadBalancerWhite(_Network): + _icon = "load-balancer-white.png" + + +class LoadBalancer(_Network): + _icon = "load-balancer.png" + + +class RouteTableWhite(_Network): + _icon = "route-table-white.png" + + +class RouteTable(_Network): + _icon = "route-table.png" + + +class SecurityListsWhite(_Network): + _icon = "security-lists-white.png" + + +class SecurityLists(_Network): + _icon = "security-lists.png" + + +class ServiceGatewayWhite(_Network): + _icon = "service-gateway-white.png" + + +class ServiceGateway(_Network): + _icon = "service-gateway.png" + + +class VcnWhite(_Network): + _icon = "vcn-white.png" + + +class Vcn(_Network): + _icon = "vcn.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/oci/security.py b/.venv/Lib/site-packages/diagrams/oci/security.py new file mode 100644 index 00000000..5075da81 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/security.py @@ -0,0 +1,75 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OCI + + +class _Security(_OCI): + _type = "security" + _icon_dir = "resources/oci/security" + + +class CloudGuardWhite(_Security): + _icon = "cloud-guard-white.png" + + +class CloudGuard(_Security): + _icon = "cloud-guard.png" + + +class DDOSWhite(_Security): + _icon = "ddos-white.png" + + +class DDOS(_Security): + _icon = "ddos.png" + + +class EncryptionWhite(_Security): + _icon = "encryption-white.png" + + +class Encryption(_Security): + _icon = "encryption.png" + + +class IDAccessWhite(_Security): + _icon = "id-access-white.png" + + +class IDAccess(_Security): + _icon = "id-access.png" + + +class KeyManagementWhite(_Security): + _icon = "key-management-white.png" + + +class KeyManagement(_Security): + _icon = "key-management.png" + + +class MaxSecurityZoneWhite(_Security): + _icon = "max-security-zone-white.png" + + +class MaxSecurityZone(_Security): + _icon = "max-security-zone.png" + + +class VaultWhite(_Security): + _icon = "vault-white.png" + + +class Vault(_Security): + _icon = "vault.png" + + +class WAFWhite(_Security): + _icon = "waf-white.png" + + +class WAF(_Security): + _icon = "waf.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/oci/storage.py b/.venv/Lib/site-packages/diagrams/oci/storage.py new file mode 100644 index 00000000..e1a57165 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/oci/storage.py @@ -0,0 +1,83 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OCI + + +class _Storage(_OCI): + _type = "storage" + _icon_dir = "resources/oci/storage" + + +class BackupRestoreWhite(_Storage): + _icon = "backup-restore-white.png" + + +class BackupRestore(_Storage): + _icon = "backup-restore.png" + + +class BlockStorageCloneWhite(_Storage): + _icon = "block-storage-clone-white.png" + + +class BlockStorageClone(_Storage): + _icon = "block-storage-clone.png" + + +class BlockStorageWhite(_Storage): + _icon = "block-storage-white.png" + + +class BlockStorage(_Storage): + _icon = "block-storage.png" + + +class BucketsWhite(_Storage): + _icon = "buckets-white.png" + + +class Buckets(_Storage): + _icon = "buckets.png" + + +class DataTransferWhite(_Storage): + _icon = "data-transfer-white.png" + + +class DataTransfer(_Storage): + _icon = "data-transfer.png" + + +class ElasticPerformanceWhite(_Storage): + _icon = "elastic-performance-white.png" + + +class ElasticPerformance(_Storage): + _icon = "elastic-performance.png" + + +class FileStorageWhite(_Storage): + _icon = "file-storage-white.png" + + +class FileStorage(_Storage): + _icon = "file-storage.png" + + +class ObjectStorageWhite(_Storage): + _icon = "object-storage-white.png" + + +class ObjectStorage(_Storage): + _icon = "object-storage.png" + + +class StorageGatewayWhite(_Storage): + _icon = "storage-gateway-white.png" + + +class StorageGateway(_Storage): + _icon = "storage-gateway.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/__init__.py b/.venv/Lib/site-packages/diagrams/onprem/__init__.py new file mode 100644 index 00000000..232c0630 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/__init__.py @@ -0,0 +1,16 @@ +""" +OnPrem provides a set of general on-premises services. 
+""" + +from diagrams import Node + + +class _OnPrem(Node): + _provider = "onprem" + _icon_dir = "resources/onprem" + + fontcolor = "#ffffff" + + +class OnPrem(_OnPrem): + _icon = "onprem.png" diff --git a/.venv/Lib/site-packages/diagrams/onprem/aggregator.py b/.venv/Lib/site-packages/diagrams/onprem/aggregator.py new file mode 100644 index 00000000..01f3c77a --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/aggregator.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Aggregator(_OnPrem): + _type = "aggregator" + _icon_dir = "resources/onprem/aggregator" + + +class Fluentd(_Aggregator): + _icon = "fluentd.png" + + +class Vector(_Aggregator): + _icon = "vector.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/analytics.py b/.venv/Lib/site-packages/diagrams/onprem/analytics.py new file mode 100644 index 00000000..7e3ee9a3 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/analytics.py @@ -0,0 +1,81 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Analytics(_OnPrem): + _type = "analytics" + _icon_dir = "resources/onprem/analytics" + + +class Beam(_Analytics): + _icon = "beam.png" + + +class Databricks(_Analytics): + _icon = "databricks.png" + + +class Dbt(_Analytics): + _icon = "dbt.png" + + +class Dremio(_Analytics): + _icon = "dremio.png" + + +class Flink(_Analytics): + _icon = "flink.png" + + +class Hadoop(_Analytics): + _icon = "hadoop.png" + + +class Hive(_Analytics): + _icon = "hive.png" + + +class Metabase(_Analytics): + _icon = "metabase.png" + + +class Norikra(_Analytics): + _icon = "norikra.png" + + +class Powerbi(_Analytics): + _icon = "powerbi.png" + + +class Presto(_Analytics): + _icon = "presto.png" + + +class Singer(_Analytics): + _icon = "singer.png" + + +class Spark(_Analytics): + _icon = "spark.png" + + +class Storm(_Analytics): + _icon = "storm.png" + + +class Superset(_Analytics): + _icon = "superset.png" + + +class Tableau(_Analytics): + _icon = "tableau.png" + + +class Trino(_Analytics): + _icon = "trino.png" + + +# Aliases + +PowerBI = Powerbi diff --git a/.venv/Lib/site-packages/diagrams/onprem/auth.py b/.venv/Lib/site-packages/diagrams/onprem/auth.py new file mode 100644 index 00000000..dc98f688 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/auth.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Auth(_OnPrem): + _type = "auth" + _icon_dir = "resources/onprem/auth" + + +class Boundary(_Auth): + _icon = "boundary.png" + + +class BuzzfeedSso(_Auth): + _icon = "buzzfeed-sso.png" + + +class Oauth2Proxy(_Auth): + _icon = "oauth2-proxy.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/cd.py b/.venv/Lib/site-packages/diagrams/onprem/cd.py new file mode 100644 index 00000000..bda0b682 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/cd.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OnPrem + + +class _Cd(_OnPrem): + _type = "cd" + _icon_dir = "resources/onprem/cd" + + +class Spinnaker(_Cd): + _icon = "spinnaker.png" + + +class TektonCli(_Cd): + _icon = "tekton-cli.png" + + +class Tekton(_Cd): + _icon = "tekton.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/certificates.py b/.venv/Lib/site-packages/diagrams/onprem/certificates.py new file mode 100644 index 00000000..030e8a42 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/certificates.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Certificates(_OnPrem): + _type = "certificates" + _icon_dir = "resources/onprem/certificates" + + +class CertManager(_Certificates): + _icon = "cert-manager.png" + + +class LetsEncrypt(_Certificates): + _icon = "lets-encrypt.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/ci.py b/.venv/Lib/site-packages/diagrams/onprem/ci.py new file mode 100644 index 00000000..cdb25336 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/ci.py @@ -0,0 +1,55 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Ci(_OnPrem): + _type = "ci" + _icon_dir = "resources/onprem/ci" + + +class Circleci(_Ci): + _icon = "circleci.png" + + +class Concourseci(_Ci): + _icon = "concourseci.png" + + +class Droneci(_Ci): + _icon = "droneci.png" + + +class GithubActions(_Ci): + _icon = "github-actions.png" + + +class Gitlabci(_Ci): + _icon = "gitlabci.png" + + +class Jenkins(_Ci): + _icon = "jenkins.png" + + +class Teamcity(_Ci): + _icon = "teamcity.png" + + +class Travisci(_Ci): + _icon = "travisci.png" + + +class Zuulci(_Ci): + _icon = "zuulci.png" + + +# Aliases + +CircleCI = Circleci +ConcourseCI = Concourseci +DroneCI = Droneci +GitlabCI = Gitlabci +TravisCI = Travisci +TC = Teamcity +ZuulCI = Zuulci diff --git a/.venv/Lib/site-packages/diagrams/onprem/client.py b/.venv/Lib/site-packages/diagrams/onprem/client.py new file mode 100644 index 00000000..72290608 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/client.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Client(_OnPrem): + _type = "client" + _icon_dir = "resources/onprem/client" + + +class Client(_Client): + _icon = "client.png" + + +class User(_Client): + _icon = "user.png" + + +class Users(_Client): + _icon = "users.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/compute.py b/.venv/Lib/site-packages/diagrams/onprem/compute.py new file mode 100644 index 00000000..0e07cf28 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/compute.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Compute(_OnPrem): + _type = "compute" + _icon_dir = "resources/onprem/compute" + + +class Nomad(_Compute): + _icon = "nomad.png" + + +class Server(_Compute): + _icon = "server.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/container.py b/.venv/Lib/site-packages/diagrams/onprem/container.py new file mode 100644 index 00000000..6e4f5d5d --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/container.py @@ -0,0 +1,46 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OnPrem + + +class _Container(_OnPrem): + _type = "container" + _icon_dir = "resources/onprem/container" + + +class Containerd(_Container): + _icon = "containerd.png" + + +class Crio(_Container): + _icon = "crio.png" + + +class Docker(_Container): + _icon = "docker.png" + + +class Firecracker(_Container): + _icon = "firecracker.png" + + +class Gvisor(_Container): + _icon = "gvisor.png" + + +class K3S(_Container): + _icon = "k3s.png" + + +class Lxc(_Container): + _icon = "lxc.png" + + +class Rkt(_Container): + _icon = "rkt.png" + + +# Aliases + +LXC = Lxc +RKT = Rkt diff --git a/.venv/Lib/site-packages/diagrams/onprem/database.py b/.venv/Lib/site-packages/diagrams/onprem/database.py new file mode 100644 index 00000000..40ed89ca --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/database.py @@ -0,0 +1,95 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Database(_OnPrem): + _type = "database" + _icon_dir = "resources/onprem/database" + + +class Cassandra(_Database): + _icon = "cassandra.png" + + +class Clickhouse(_Database): + _icon = "clickhouse.png" + + +class Cockroachdb(_Database): + _icon = "cockroachdb.png" + + +class Couchbase(_Database): + _icon = "couchbase.png" + + +class Couchdb(_Database): + _icon = "couchdb.png" + + +class Dgraph(_Database): + _icon = "dgraph.png" + + +class Druid(_Database): + _icon = "druid.png" + + +class Hbase(_Database): + _icon = "hbase.png" + + +class Influxdb(_Database): + _icon = "influxdb.png" + + +class Janusgraph(_Database): + _icon = "janusgraph.png" + + +class Mariadb(_Database): + _icon = "mariadb.png" + + +class Mongodb(_Database): + _icon = "mongodb.png" + + +class Mssql(_Database): + _icon = "mssql.png" + + +class Mysql(_Database): + _icon = "mysql.png" + + +class Neo4J(_Database): + _icon = "neo4j.png" + + +class Oracle(_Database): + _icon = "oracle.png" + + +class Postgresql(_Database): + _icon = "postgresql.png" + + +class Scylla(_Database): + _icon = "scylla.png" + + +# Aliases + +ClickHouse = Clickhouse +CockroachDB = Cockroachdb +CouchDB = Couchdb +HBase = Hbase +InfluxDB = Influxdb +JanusGraph = Janusgraph +MariaDB = Mariadb +MongoDB = Mongodb +MSSQL = Mssql +MySQL = Mysql +PostgreSQL = Postgresql diff --git a/.venv/Lib/site-packages/diagrams/onprem/dns.py b/.venv/Lib/site-packages/diagrams/onprem/dns.py new file mode 100644 index 00000000..3e1adf61 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/dns.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Dns(_OnPrem): + _type = "dns" + _icon_dir = "resources/onprem/dns" + + +class Coredns(_Dns): + _icon = "coredns.png" + + +class Powerdns(_Dns): + _icon = "powerdns.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/etl.py b/.venv/Lib/site-packages/diagrams/onprem/etl.py new file mode 100644 index 00000000..6f4c88ba --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/etl.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OnPrem + + +class _Etl(_OnPrem): + _type = "etl" + _icon_dir = "resources/onprem/etl" + + +class Embulk(_Etl): + _icon = "embulk.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/gitops.py b/.venv/Lib/site-packages/diagrams/onprem/gitops.py new file mode 100644 index 00000000..c4702c62 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/gitops.py @@ -0,0 +1,25 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Gitops(_OnPrem): + _type = "gitops" + _icon_dir = "resources/onprem/gitops" + + +class Argocd(_Gitops): + _icon = "argocd.png" + + +class Flagger(_Gitops): + _icon = "flagger.png" + + +class Flux(_Gitops): + _icon = "flux.png" + + +# Aliases + +ArgoCD = Argocd diff --git a/.venv/Lib/site-packages/diagrams/onprem/groupware.py b/.venv/Lib/site-packages/diagrams/onprem/groupware.py new file mode 100644 index 00000000..11e02dc4 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/groupware.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Groupware(_OnPrem): + _type = "groupware" + _icon_dir = "resources/onprem/groupware" + + +class Nextcloud(_Groupware): + _icon = "nextcloud.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/iac.py b/.venv/Lib/site-packages/diagrams/onprem/iac.py new file mode 100644 index 00000000..c56284ac --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/iac.py @@ -0,0 +1,35 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Iac(_OnPrem): + _type = "iac" + _icon_dir = "resources/onprem/iac" + + +class Ansible(_Iac): + _icon = "ansible.png" + + +class Atlantis(_Iac): + _icon = "atlantis.png" + + +class Awx(_Iac): + _icon = "awx.png" + + +class Pulumi(_Iac): + _icon = "pulumi.png" + + +class Puppet(_Iac): + _icon = "puppet.png" + + +class Terraform(_Iac): + _icon = "terraform.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/identity.py b/.venv/Lib/site-packages/diagrams/onprem/identity.py new file mode 100644 index 00000000..f19ef413 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/identity.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Identity(_OnPrem): + _type = "identity" + _icon_dir = "resources/onprem/identity" + + +class Dex(_Identity): + _icon = "dex.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/inmemory.py b/.venv/Lib/site-packages/diagrams/onprem/inmemory.py new file mode 100644 index 00000000..458e2d0f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/inmemory.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Inmemory(_OnPrem): + _type = "inmemory" + _icon_dir = "resources/onprem/inmemory" + + +class Aerospike(_Inmemory): + _icon = "aerospike.png" + + +class Hazelcast(_Inmemory): + _icon = "hazelcast.png" + + +class Memcached(_Inmemory): + _icon = "memcached.png" + + +class Redis(_Inmemory): + _icon = "redis.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/logging.py b/.venv/Lib/site-packages/diagrams/onprem/logging.py new file mode 100644 index 00000000..f56b2339 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/logging.py @@ -0,0 +1,34 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OnPrem + + +class _Logging(_OnPrem): + _type = "logging" + _icon_dir = "resources/onprem/logging" + + +class Fluentbit(_Logging): + _icon = "fluentbit.png" + + +class Graylog(_Logging): + _icon = "graylog.png" + + +class Loki(_Logging): + _icon = "loki.png" + + +class Rsyslog(_Logging): + _icon = "rsyslog.png" + + +class SyslogNg(_Logging): + _icon = "syslog-ng.png" + + +# Aliases + +FluentBit = Fluentbit +RSyslog = Rsyslog diff --git a/.venv/Lib/site-packages/diagrams/onprem/messaging.py b/.venv/Lib/site-packages/diagrams/onprem/messaging.py new file mode 100644 index 00000000..65f33c30 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/messaging.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Messaging(_OnPrem): + _type = "messaging" + _icon_dir = "resources/onprem/messaging" + + +class Centrifugo(_Messaging): + _icon = "centrifugo.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/mlops.py b/.venv/Lib/site-packages/diagrams/onprem/mlops.py new file mode 100644 index 00000000..dd9fd862 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/mlops.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Mlops(_OnPrem): + _type = "mlops" + _icon_dir = "resources/onprem/mlops" + + +class Mlflow(_Mlops): + _icon = "mlflow.png" + + +class Polyaxon(_Mlops): + _icon = "polyaxon.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/monitoring.py b/.venv/Lib/site-packages/diagrams/onprem/monitoring.py new file mode 100644 index 00000000..2423a2db --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/monitoring.py @@ -0,0 +1,67 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Monitoring(_OnPrem): + _type = "monitoring" + _icon_dir = "resources/onprem/monitoring" + + +class Cortex(_Monitoring): + _icon = "cortex.png" + + +class Datadog(_Monitoring): + _icon = "datadog.png" + + +class Dynatrace(_Monitoring): + _icon = "dynatrace.png" + + +class Grafana(_Monitoring): + _icon = "grafana.png" + + +class Humio(_Monitoring): + _icon = "humio.png" + + +class Mimir(_Monitoring): + _icon = "mimir.png" + + +class Nagios(_Monitoring): + _icon = "nagios.png" + + +class Newrelic(_Monitoring): + _icon = "newrelic.png" + + +class PrometheusOperator(_Monitoring): + _icon = "prometheus-operator.png" + + +class Prometheus(_Monitoring): + _icon = "prometheus.png" + + +class Sentry(_Monitoring): + _icon = "sentry.png" + + +class Splunk(_Monitoring): + _icon = "splunk.png" + + +class Thanos(_Monitoring): + _icon = "thanos.png" + + +class Zabbix(_Monitoring): + _icon = "zabbix.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/network.py b/.venv/Lib/site-packages/diagrams/onprem/network.py new file mode 100644 index 00000000..7ef068b5 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/network.py @@ -0,0 +1,142 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OnPrem + + +class _Network(_OnPrem): + _type = "network" + _icon_dir = "resources/onprem/network" + + +class Ambassador(_Network): + _icon = "ambassador.png" + + +class Apache(_Network): + _icon = "apache.png" + + +class Bind9(_Network): + _icon = "bind-9.png" + + +class Caddy(_Network): + _icon = "caddy.png" + + +class Consul(_Network): + _icon = "consul.png" + + +class Envoy(_Network): + _icon = "envoy.png" + + +class Etcd(_Network): + _icon = "etcd.png" + + +class Glassfish(_Network): + _icon = "glassfish.png" + + +class Gunicorn(_Network): + _icon = "gunicorn.png" + + +class Haproxy(_Network): + _icon = "haproxy.png" + + +class Internet(_Network): + _icon = "internet.png" + + +class Istio(_Network): + _icon = "istio.png" + + +class Jbossas(_Network): + _icon = "jbossas.png" + + +class Jetty(_Network): + _icon = "jetty.png" + + +class Kong(_Network): + _icon = "kong.png" + + +class Linkerd(_Network): + _icon = "linkerd.png" + + +class Mikrotik(_Network): + _icon = "mikrotik.png" + + +class Nginx(_Network): + _icon = "nginx.png" + + +class Ocelot(_Network): + _icon = "ocelot.png" + + +class OpenServiceMesh(_Network): + _icon = "open-service-mesh.png" + + +class Opnsense(_Network): + _icon = "opnsense.png" + + +class Pfsense(_Network): + _icon = "pfsense.png" + + +class Pomerium(_Network): + _icon = "pomerium.png" + + +class Powerdns(_Network): + _icon = "powerdns.png" + + +class Tomcat(_Network): + _icon = "tomcat.png" + + +class Traefik(_Network): + _icon = "traefik.png" + + +class Tyk(_Network): + _icon = "tyk.png" + + +class Vyos(_Network): + _icon = "vyos.png" + + +class Wildfly(_Network): + _icon = "wildfly.png" + + +class Yarp(_Network): + _icon = "yarp.png" + + +class Zookeeper(_Network): + _icon = "zookeeper.png" + + +# Aliases + +ETCD = Etcd +HAProxy = Haproxy +OSM = OpenServiceMesh +OPNSense = Opnsense +PFSense = Pfsense +VyOS = Vyos diff --git a/.venv/Lib/site-packages/diagrams/onprem/proxmox.py b/.venv/Lib/site-packages/diagrams/onprem/proxmox.py new file mode 100644 index 00000000..64fc4f44 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/proxmox.py @@ -0,0 +1,17 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Proxmox(_OnPrem): + _type = "proxmox" + _icon_dir = "resources/onprem/proxmox" + + +class Pve(_Proxmox): + _icon = "pve.png" + + +# Aliases + +ProxmoxVE = Pve diff --git a/.venv/Lib/site-packages/diagrams/onprem/queue.py b/.venv/Lib/site-packages/diagrams/onprem/queue.py new file mode 100644 index 00000000..7a54649c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/queue.py @@ -0,0 +1,44 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OnPrem + + +class _Queue(_OnPrem): + _type = "queue" + _icon_dir = "resources/onprem/queue" + + +class Activemq(_Queue): + _icon = "activemq.png" + + +class Celery(_Queue): + _icon = "celery.png" + + +class Emqx(_Queue): + _icon = "emqx.png" + + +class Kafka(_Queue): + _icon = "kafka.png" + + +class Nats(_Queue): + _icon = "nats.png" + + +class Rabbitmq(_Queue): + _icon = "rabbitmq.png" + + +class Zeromq(_Queue): + _icon = "zeromq.png" + + +# Aliases + +ActiveMQ = Activemq +EMQX = Emqx +RabbitMQ = Rabbitmq +ZeroMQ = Zeromq diff --git a/.venv/Lib/site-packages/diagrams/onprem/registry.py b/.venv/Lib/site-packages/diagrams/onprem/registry.py new file mode 100644 index 00000000..259df50e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/registry.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Registry(_OnPrem): + _type = "registry" + _icon_dir = "resources/onprem/registry" + + +class Harbor(_Registry): + _icon = "harbor.png" + + +class Jfrog(_Registry): + _icon = "jfrog.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/search.py b/.venv/Lib/site-packages/diagrams/onprem/search.py new file mode 100644 index 00000000..da5f0b4e --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/search.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Search(_OnPrem): + _type = "search" + _icon_dir = "resources/onprem/search" + + +class Solr(_Search): + _icon = "solr.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/security.py b/.venv/Lib/site-packages/diagrams/onprem/security.py new file mode 100644 index 00000000..d0447421 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/security.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Security(_OnPrem): + _type = "security" + _icon_dir = "resources/onprem/security" + + +class Bitwarden(_Security): + _icon = "bitwarden.png" + + +class Trivy(_Security): + _icon = "trivy.png" + + +class Vault(_Security): + _icon = "vault.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/storage.py b/.venv/Lib/site-packages/diagrams/onprem/storage.py new file mode 100644 index 00000000..9462b7c1 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/storage.py @@ -0,0 +1,30 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Storage(_OnPrem): + _type = "storage" + _icon_dir = "resources/onprem/storage" + + +class CephOsd(_Storage): + _icon = "ceph-osd.png" + + +class Ceph(_Storage): + _icon = "ceph.png" + + +class Glusterfs(_Storage): + _icon = "glusterfs.png" + + +class Portworx(_Storage): + _icon = "portworx.png" + + +# Aliases + +CEPH = Ceph +CEPH_OSD = CephOsd diff --git a/.venv/Lib/site-packages/diagrams/onprem/tracing.py b/.venv/Lib/site-packages/diagrams/onprem/tracing.py new file mode 100644 index 00000000..f42fef25 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/tracing.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OnPrem + + +class _Tracing(_OnPrem): + _type = "tracing" + _icon_dir = "resources/onprem/tracing" + + +class Jaeger(_Tracing): + _icon = "jaeger.png" + + +class Tempo(_Tracing): + _icon = "tempo.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/vcs.py b/.venv/Lib/site-packages/diagrams/onprem/vcs.py new file mode 100644 index 00000000..d5f06644 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/vcs.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Vcs(_OnPrem): + _type = "vcs" + _icon_dir = "resources/onprem/vcs" + + +class Git(_Vcs): + _icon = "git.png" + + +class Gitea(_Vcs): + _icon = "gitea.png" + + +class Github(_Vcs): + _icon = "github.png" + + +class Gitlab(_Vcs): + _icon = "gitlab.png" + + +class Svn(_Vcs): + _icon = "svn.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/onprem/workflow.py b/.venv/Lib/site-packages/diagrams/onprem/workflow.py new file mode 100644 index 00000000..db6ea3b0 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/onprem/workflow.py @@ -0,0 +1,30 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OnPrem + + +class _Workflow(_OnPrem): + _type = "workflow" + _icon_dir = "resources/onprem/workflow" + + +class Airflow(_Workflow): + _icon = "airflow.png" + + +class Digdag(_Workflow): + _icon = "digdag.png" + + +class Kubeflow(_Workflow): + _icon = "kubeflow.png" + + +class Nifi(_Workflow): + _icon = "nifi.png" + + +# Aliases + +KubeFlow = Kubeflow +NiFi = Nifi diff --git a/.venv/Lib/site-packages/diagrams/openstack/__init__.py b/.venv/Lib/site-packages/diagrams/openstack/__init__.py new file mode 100644 index 00000000..46541103 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/__init__.py @@ -0,0 +1,16 @@ +""" +Openstack provides a set of general OpenStack services. +""" + +from diagrams import Node + + +class _OpenStack(Node): + _provider = "openstack" + _icon_dir = "resources/openstack" + + fontcolor = "#ffffff" + + +class OpenStack(_OpenStack): + _icon = "openstack.png" diff --git a/.venv/Lib/site-packages/diagrams/openstack/adjacentenablers.py b/.venv/Lib/site-packages/diagrams/openstack/adjacentenablers.py new file mode 100644 index 00000000..08b69a52 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/adjacentenablers.py @@ -0,0 +1,11 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Adjacentenablers(_OpenStack): + _type = "adjacentenablers" + _icon_dir = "resources/openstack/adjacentenablers" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/apiproxies.py b/.venv/Lib/site-packages/diagrams/openstack/apiproxies.py new file mode 100644 index 00000000..d9b642bc --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/apiproxies.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Apiproxies(_OpenStack): + _type = "apiproxies" + _icon_dir = "resources/openstack/apiproxies" + + +class EC2API(_Apiproxies): + _icon = "ec2api.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/applicationlifecycle.py b/.venv/Lib/site-packages/diagrams/openstack/applicationlifecycle.py new file mode 100644 index 00000000..06aad638 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/applicationlifecycle.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. 
DO NOT EDIT. + +from . import _OpenStack + + +class _Applicationlifecycle(_OpenStack): + _type = "applicationlifecycle" + _icon_dir = "resources/openstack/applicationlifecycle" + + +class Freezer(_Applicationlifecycle): + _icon = "freezer.png" + + +class Masakari(_Applicationlifecycle): + _icon = "masakari.png" + + +class Murano(_Applicationlifecycle): + _icon = "murano.png" + + +class Solum(_Applicationlifecycle): + _icon = "solum.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/baremetal.py b/.venv/Lib/site-packages/diagrams/openstack/baremetal.py new file mode 100644 index 00000000..f4535e0b --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/baremetal.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Baremetal(_OpenStack): + _type = "baremetal" + _icon_dir = "resources/openstack/baremetal" + + +class Cyborg(_Baremetal): + _icon = "cyborg.png" + + +class Ironic(_Baremetal): + _icon = "ironic.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/billing.py b/.venv/Lib/site-packages/diagrams/openstack/billing.py new file mode 100644 index 00000000..fabe17fb --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/billing.py @@ -0,0 +1,17 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Billing(_OpenStack): + _type = "billing" + _icon_dir = "resources/openstack/billing" + + +class Cloudkitty(_Billing): + _icon = "cloudkitty.png" + + +# Aliases + +CloudKitty = Cloudkitty diff --git a/.venv/Lib/site-packages/diagrams/openstack/compute.py b/.venv/Lib/site-packages/diagrams/openstack/compute.py new file mode 100644 index 00000000..74901087 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/compute.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Compute(_OpenStack): + _type = "compute" + _icon_dir = "resources/openstack/compute" + + +class Nova(_Compute): + _icon = "nova.png" + + +class Qinling(_Compute): + _icon = "qinling.png" + + +class Zun(_Compute): + _icon = "zun.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/containerservices.py b/.venv/Lib/site-packages/diagrams/openstack/containerservices.py new file mode 100644 index 00000000..cf3690f6 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/containerservices.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Containerservices(_OpenStack): + _type = "containerservices" + _icon_dir = "resources/openstack/containerservices" + + +class Kuryr(_Containerservices): + _icon = "kuryr.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/deployment.py b/.venv/Lib/site-packages/diagrams/openstack/deployment.py new file mode 100644 index 00000000..fba5d4b3 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/deployment.py @@ -0,0 +1,38 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OpenStack + + +class _Deployment(_OpenStack): + _type = "deployment" + _icon_dir = "resources/openstack/deployment" + + +class Ansible(_Deployment): + _icon = "ansible.png" + + +class Charms(_Deployment): + _icon = "charms.png" + + +class Chef(_Deployment): + _icon = "chef.png" + + +class Helm(_Deployment): + _icon = "helm.png" + + +class Kolla(_Deployment): + _icon = "kolla.png" + + +class Tripleo(_Deployment): + _icon = "tripleo.png" + + +# Aliases + +KollaAnsible = Kolla +TripleO = Tripleo diff --git a/.venv/Lib/site-packages/diagrams/openstack/frontend.py b/.venv/Lib/site-packages/diagrams/openstack/frontend.py new file mode 100644 index 00000000..b490f022 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/frontend.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Frontend(_OpenStack): + _type = "frontend" + _icon_dir = "resources/openstack/frontend" + + +class Horizon(_Frontend): + _icon = "horizon.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/lifecyclemanagement.py b/.venv/Lib/site-packages/diagrams/openstack/lifecyclemanagement.py new file mode 100644 index 00000000..d657ba45 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/lifecyclemanagement.py @@ -0,0 +1,11 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Lifecyclemanagement(_OpenStack): + _type = "lifecyclemanagement" + _icon_dir = "resources/openstack/lifecyclemanagement" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/monitoring.py b/.venv/Lib/site-packages/diagrams/openstack/monitoring.py new file mode 100644 index 00000000..d1379265 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/monitoring.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Monitoring(_OpenStack): + _type = "monitoring" + _icon_dir = "resources/openstack/monitoring" + + +class Monasca(_Monitoring): + _icon = "monasca.png" + + +class Telemetry(_Monitoring): + _icon = "telemetry.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/multiregion.py b/.venv/Lib/site-packages/diagrams/openstack/multiregion.py new file mode 100644 index 00000000..2d7c132c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/multiregion.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Multiregion(_OpenStack): + _type = "multiregion" + _icon_dir = "resources/openstack/multiregion" + + +class Tricircle(_Multiregion): + _icon = "tricircle.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/networking.py b/.venv/Lib/site-packages/diagrams/openstack/networking.py new file mode 100644 index 00000000..24226c9c --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/networking.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OpenStack + + +class _Networking(_OpenStack): + _type = "networking" + _icon_dir = "resources/openstack/networking" + + +class Designate(_Networking): + _icon = "designate.png" + + +class Neutron(_Networking): + _icon = "neutron.png" + + +class Octavia(_Networking): + _icon = "octavia.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/nfv.py b/.venv/Lib/site-packages/diagrams/openstack/nfv.py new file mode 100644 index 00000000..59f05598 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/nfv.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _NFV(_OpenStack): + _type = "nfv" + _icon_dir = "resources/openstack/nfv" + + +class Tacker(_NFV): + _icon = "tacker.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/operations.py b/.venv/Lib/site-packages/diagrams/openstack/operations.py new file mode 100644 index 00000000..1e954f9f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/operations.py @@ -0,0 +1,11 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Operations(_OpenStack): + _type = "operations" + _icon_dir = "resources/openstack/operations" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/optimization.py b/.venv/Lib/site-packages/diagrams/openstack/optimization.py new file mode 100644 index 00000000..71e611cf --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/optimization.py @@ -0,0 +1,27 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Optimization(_OpenStack): + _type = "optimization" + _icon_dir = "resources/openstack/optimization" + + +class Congress(_Optimization): + _icon = "congress.png" + + +class Rally(_Optimization): + _icon = "rally.png" + + +class Vitrage(_Optimization): + _icon = "vitrage.png" + + +class Watcher(_Optimization): + _icon = "watcher.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/orchestration.py b/.venv/Lib/site-packages/diagrams/openstack/orchestration.py new file mode 100644 index 00000000..1718dd86 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/orchestration.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Orchestration(_OpenStack): + _type = "orchestration" + _icon_dir = "resources/openstack/orchestration" + + +class Blazar(_Orchestration): + _icon = "blazar.png" + + +class Heat(_Orchestration): + _icon = "heat.png" + + +class Mistral(_Orchestration): + _icon = "mistral.png" + + +class Senlin(_Orchestration): + _icon = "senlin.png" + + +class Zaqar(_Orchestration): + _icon = "zaqar.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/packaging.py b/.venv/Lib/site-packages/diagrams/openstack/packaging.py new file mode 100644 index 00000000..47a61c37 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/packaging.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OpenStack + + +class _Packaging(_OpenStack): + _type = "packaging" + _icon_dir = "resources/openstack/packaging" + + +class LOCI(_Packaging): + _icon = "loci.png" + + +class Puppet(_Packaging): + _icon = "puppet.png" + + +class RPM(_Packaging): + _icon = "rpm.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/sharedservices.py b/.venv/Lib/site-packages/diagrams/openstack/sharedservices.py new file mode 100644 index 00000000..d01634ef --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/sharedservices.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Sharedservices(_OpenStack): + _type = "sharedservices" + _icon_dir = "resources/openstack/sharedservices" + + +class Barbican(_Sharedservices): + _icon = "barbican.png" + + +class Glance(_Sharedservices): + _icon = "glance.png" + + +class Karbor(_Sharedservices): + _icon = "karbor.png" + + +class Keystone(_Sharedservices): + _icon = "keystone.png" + + +class Searchlight(_Sharedservices): + _icon = "searchlight.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/storage.py b/.venv/Lib/site-packages/diagrams/openstack/storage.py new file mode 100644 index 00000000..6c5ddf15 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/storage.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _Storage(_OpenStack): + _type = "storage" + _icon_dir = "resources/openstack/storage" + + +class Cinder(_Storage): + _icon = "cinder.png" + + +class Manila(_Storage): + _icon = "manila.png" + + +class Swift(_Storage): + _icon = "swift.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/openstack/user.py b/.venv/Lib/site-packages/diagrams/openstack/user.py new file mode 100644 index 00000000..b3a9e6f9 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/user.py @@ -0,0 +1,17 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _OpenStack + + +class _User(_OpenStack): + _type = "user" + _icon_dir = "resources/openstack/user" + + +class Openstackclient(_User): + _icon = "openstackclient.png" + + +# Aliases + +OpenStackClient = Openstackclient diff --git a/.venv/Lib/site-packages/diagrams/openstack/workloadprovisioning.py b/.venv/Lib/site-packages/diagrams/openstack/workloadprovisioning.py new file mode 100644 index 00000000..d43f37ae --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/openstack/workloadprovisioning.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _OpenStack + + +class _Workloadprovisioning(_OpenStack): + _type = "workloadprovisioning" + _icon_dir = "resources/openstack/workloadprovisioning" + + +class Magnum(_Workloadprovisioning): + _icon = "magnum.png" + + +class Sahara(_Workloadprovisioning): + _icon = "sahara.png" + + +class Trove(_Workloadprovisioning): + _icon = "trove.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/outscale/__init__.py b/.venv/Lib/site-packages/diagrams/outscale/__init__.py new file mode 100644 index 00000000..391ac4dd --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/outscale/__init__.py @@ -0,0 +1,12 @@ +from diagrams import Node + + +class _Outscale(Node): + _provider = "outscale" + _icon_dir = "resources/outscale" + + fontcolor = "#ffffff" + + +class Outscale(_Outscale): + _icon = "outscale.png" diff --git a/.venv/Lib/site-packages/diagrams/outscale/compute.py b/.venv/Lib/site-packages/diagrams/outscale/compute.py new file mode 100644 index 00000000..35a09619 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/outscale/compute.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Outscale + + +class _Compute(_Outscale): + _type = "compute" + _icon_dir = "resources/outscale/compute" + + +class Compute(_Compute): + _icon = "compute.png" + + +class DirectConnect(_Compute): + _icon = "direct-connect.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/outscale/network.py b/.venv/Lib/site-packages/diagrams/outscale/network.py new file mode 100644 index 00000000..f5a18376 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/outscale/network.py @@ -0,0 +1,35 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Outscale + + +class _Network(_Outscale): + _type = "network" + _icon_dir = "resources/outscale/network" + + +class ClientVpn(_Network): + _icon = "client-vpn.png" + + +class InternetService(_Network): + _icon = "internet-service.png" + + +class LoadBalancer(_Network): + _icon = "load-balancer.png" + + +class NatService(_Network): + _icon = "nat-service.png" + + +class Net(_Network): + _icon = "net.png" + + +class SiteToSiteVpng(_Network): + _icon = "site-to-site-vpng.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/outscale/security.py b/.venv/Lib/site-packages/diagrams/outscale/security.py new file mode 100644 index 00000000..8e79df51 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/outscale/security.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Outscale + + +class _Security(_Outscale): + _type = "security" + _icon_dir = "resources/outscale/security" + + +class Firewall(_Security): + _icon = "firewall.png" + + +class IdentityAndAccessManagement(_Security): + _icon = "identity-and-access-management.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/outscale/storage.py b/.venv/Lib/site-packages/diagrams/outscale/storage.py new file mode 100644 index 00000000..1a8f3171 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/outscale/storage.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Outscale + + +class _Storage(_Outscale): + _type = "storage" + _icon_dir = "resources/outscale/storage" + + +class SimpleStorageService(_Storage): + _icon = "simple-storage-service.png" + + +class Storage(_Storage): + _icon = "storage.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/programming/__init__.py b/.venv/Lib/site-packages/diagrams/programming/__init__.py new file mode 100644 index 00000000..504cea66 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/programming/__init__.py @@ -0,0 +1,16 @@ +""" +Programming provides a set of programming languages and frameworks. +""" + +from diagrams import Node + + +class _Programming(Node): + _provider = "programming" + _icon_dir = "resources/programming" + + fontcolor = "#ffffff" + + +class Programming(_Programming): + _icon = "programming.png" diff --git a/.venv/Lib/site-packages/diagrams/programming/flowchart.py b/.venv/Lib/site-packages/diagrams/programming/flowchart.py new file mode 100644 index 00000000..1e7ecf17 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/programming/flowchart.py @@ -0,0 +1,107 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Programming + + +class _Flowchart(_Programming): + _type = "flowchart" + _icon_dir = "resources/programming/flowchart" + + +class Action(_Flowchart): + _icon = "action.png" + + +class Collate(_Flowchart): + _icon = "collate.png" + + +class Database(_Flowchart): + _icon = "database.png" + + +class Decision(_Flowchart): + _icon = "decision.png" + + +class Delay(_Flowchart): + _icon = "delay.png" + + +class Display(_Flowchart): + _icon = "display.png" + + +class Document(_Flowchart): + _icon = "document.png" + + +class InputOutput(_Flowchart): + _icon = "input-output.png" + + +class Inspection(_Flowchart): + _icon = "inspection.png" + + +class InternalStorage(_Flowchart): + _icon = "internal-storage.png" + + +class LoopLimit(_Flowchart): + _icon = "loop-limit.png" + + +class ManualInput(_Flowchart): + _icon = "manual-input.png" + + +class ManualLoop(_Flowchart): + _icon = "manual-loop.png" + + +class Merge(_Flowchart): + _icon = "merge.png" + + +class MultipleDocuments(_Flowchart): + _icon = "multiple-documents.png" + + +class OffPageConnectorLeft(_Flowchart): + _icon = "off-page-connector-left.png" + + +class OffPageConnectorRight(_Flowchart): + _icon = "off-page-connector-right.png" + + +class Or(_Flowchart): + _icon = "or.png" + + +class PredefinedProcess(_Flowchart): + _icon = "predefined-process.png" + + +class Preparation(_Flowchart): + _icon = "preparation.png" + + +class Sort(_Flowchart): + _icon = "sort.png" + + +class StartEnd(_Flowchart): + _icon = "start-end.png" + + +class StoredData(_Flowchart): + _icon = "stored-data.png" + + +class SummingJunction(_Flowchart): + _icon = "summing-junction.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/programming/framework.py b/.venv/Lib/site-packages/diagrams/programming/framework.py new file mode 100644 index 00000000..5b2cc3ea --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/programming/framework.py @@ -0,0 +1,116 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Programming + + +class _Framework(_Programming): + _type = "framework" + _icon_dir = "resources/programming/framework" + + +class Angular(_Framework): + _icon = "angular.png" + + +class Backbone(_Framework): + _icon = "backbone.png" + + +class Camel(_Framework): + _icon = "camel.png" + + +class Django(_Framework): + _icon = "django.png" + + +class Dotnet(_Framework): + _icon = "dotnet.png" + + +class Ember(_Framework): + _icon = "ember.png" + + +class Fastapi(_Framework): + _icon = "fastapi.png" + + +class Flask(_Framework): + _icon = "flask.png" + + +class Flutter(_Framework): + _icon = "flutter.png" + + +class Graphql(_Framework): + _icon = "graphql.png" + + +class Hibernate(_Framework): + _icon = "hibernate.png" + + +class Jhipster(_Framework): + _icon = "jhipster.png" + + +class Laravel(_Framework): + _icon = "laravel.png" + + +class Micronaut(_Framework): + _icon = "micronaut.png" + + +class Nextjs(_Framework): + _icon = "nextjs.png" + + +class Phoenix(_Framework): + _icon = "phoenix.png" + + +class Quarkus(_Framework): + _icon = "quarkus.png" + + +class Rails(_Framework): + _icon = "rails.png" + + +class React(_Framework): + _icon = "react.png" + + +class Spring(_Framework): + _icon = "spring.png" + + +class Sqlpage(_Framework): + _icon = "sqlpage.png" + + +class Starlette(_Framework): + _icon = "starlette.png" + + +class Svelte(_Framework): + _icon = "svelte.png" + + +class Vercel(_Framework): + _icon = "vercel.png" + + +class Vue(_Framework): + _icon = "vue.png" + + +# Aliases + +FastAPI = Fastapi +GraphQL = Graphql +DotNet = Dotnet +NextJs = Nextjs diff --git a/.venv/Lib/site-packages/diagrams/programming/language.py b/.venv/Lib/site-packages/diagrams/programming/language.py new file mode 100644 index 00000000..a747dbfc --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/programming/language.py @@ -0,0 +1,108 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Programming + + +class _Language(_Programming): + _type = "language" + _icon_dir = "resources/programming/language" + + +class Bash(_Language): + _icon = "bash.png" + + +class C(_Language): + _icon = "c.png" + + +class Cpp(_Language): + _icon = "cpp.png" + + +class Csharp(_Language): + _icon = "csharp.png" + + +class Dart(_Language): + _icon = "dart.png" + + +class Elixir(_Language): + _icon = "elixir.png" + + +class Erlang(_Language): + _icon = "erlang.png" + + +class Go(_Language): + _icon = "go.png" + + +class Java(_Language): + _icon = "java.png" + + +class Javascript(_Language): + _icon = "javascript.png" + + +class Kotlin(_Language): + _icon = "kotlin.png" + + +class Latex(_Language): + _icon = "latex.png" + + +class Matlab(_Language): + _icon = "matlab.png" + + +class Nodejs(_Language): + _icon = "nodejs.png" + + +class Php(_Language): + _icon = "php.png" + + +class Python(_Language): + _icon = "python.png" + + +class R(_Language): + _icon = "r.png" + + +class Ruby(_Language): + _icon = "ruby.png" + + +class Rust(_Language): + _icon = "rust.png" + + +class Scala(_Language): + _icon = "scala.png" + + +class Sql(_Language): + _icon = "sql.png" + + +class Swift(_Language): + _icon = "swift.png" + + +class Typescript(_Language): + _icon = "typescript.png" + + +# Aliases + +JavaScript = Javascript +NodeJS = Nodejs +PHP = Php +TypeScript = Typescript diff --git a/.venv/Lib/site-packages/diagrams/programming/runtime.py b/.venv/Lib/site-packages/diagrams/programming/runtime.py new file mode 100644 index 00000000..2a3a9aa2 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/programming/runtime.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Programming + + +class _Runtime(_Programming): + _type = "runtime" + _icon_dir = "resources/programming/runtime" + + +class Dapr(_Runtime): + _icon = "dapr.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/__init__.py b/.venv/Lib/site-packages/diagrams/saas/__init__.py new file mode 100644 index 00000000..e0acaccd --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/__init__.py @@ -0,0 +1,16 @@ +""" +Saas provides a set of general saas services. +""" + +from diagrams import Node + + +class _Saas(Node): + _provider = "saas" + _icon_dir = "resources/saas" + + fontcolor = "#ffffff" + + +class Saas(_Saas): + _icon = "saas.png" diff --git a/.venv/Lib/site-packages/diagrams/saas/alerting.py b/.venv/Lib/site-packages/diagrams/saas/alerting.py new file mode 100644 index 00000000..0df15d83 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/alerting.py @@ -0,0 +1,31 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Alerting(_Saas): + _type = "alerting" + _icon_dir = "resources/saas/alerting" + + +class Newrelic(_Alerting): + _icon = "newrelic.png" + + +class Opsgenie(_Alerting): + _icon = "opsgenie.png" + + +class Pagerduty(_Alerting): + _icon = "pagerduty.png" + + +class Pushover(_Alerting): + _icon = "pushover.png" + + +class Xmatters(_Alerting): + _icon = "xmatters.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/analytics.py b/.venv/Lib/site-packages/diagrams/saas/analytics.py new file mode 100644 index 00000000..6d4f62c8 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/analytics.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Saas + + +class _Analytics(_Saas): + _type = "analytics" + _icon_dir = "resources/saas/analytics" + + +class Dataform(_Analytics): + _icon = "dataform.png" + + +class Snowflake(_Analytics): + _icon = "snowflake.png" + + +class Stitch(_Analytics): + _icon = "stitch.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/automation.py b/.venv/Lib/site-packages/diagrams/saas/automation.py new file mode 100644 index 00000000..ef047d5f --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/automation.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Automation(_Saas): + _type = "automation" + _icon_dir = "resources/saas/automation" + + +class N8N(_Automation): + _icon = "n8n.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/cdn.py b/.venv/Lib/site-packages/diagrams/saas/cdn.py new file mode 100644 index 00000000..cc0b4fc2 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/cdn.py @@ -0,0 +1,23 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Cdn(_Saas): + _type = "cdn" + _icon_dir = "resources/saas/cdn" + + +class Akamai(_Cdn): + _icon = "akamai.png" + + +class Cloudflare(_Cdn): + _icon = "cloudflare.png" + + +class Fastly(_Cdn): + _icon = "fastly.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/chat.py b/.venv/Lib/site-packages/diagrams/saas/chat.py new file mode 100644 index 00000000..446d4c91 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/chat.py @@ -0,0 +1,43 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Chat(_Saas): + _type = "chat" + _icon_dir = "resources/saas/chat" + + +class Discord(_Chat): + _icon = "discord.png" + + +class Line(_Chat): + _icon = "line.png" + + +class Mattermost(_Chat): + _icon = "mattermost.png" + + +class Messenger(_Chat): + _icon = "messenger.png" + + +class RocketChat(_Chat): + _icon = "rocket-chat.png" + + +class Slack(_Chat): + _icon = "slack.png" + + +class Teams(_Chat): + _icon = "teams.png" + + +class Telegram(_Chat): + _icon = "telegram.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/communication.py b/.venv/Lib/site-packages/diagrams/saas/communication.py new file mode 100644 index 00000000..7d3a6f02 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/communication.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Communication(_Saas): + _type = "communication" + _icon_dir = "resources/saas/communication" + + +class Twilio(_Communication): + _icon = "twilio.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/crm.py b/.venv/Lib/site-packages/diagrams/saas/crm.py new file mode 100644 index 00000000..56b1bef2 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/crm.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Saas + + +class _Crm(_Saas): + _type = "crm" + _icon_dir = "resources/saas/crm" + + +class Intercom(_Crm): + _icon = "intercom.png" + + +class Zendesk(_Crm): + _icon = "zendesk.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/filesharing.py b/.venv/Lib/site-packages/diagrams/saas/filesharing.py new file mode 100644 index 00000000..c17eef14 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/filesharing.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Filesharing(_Saas): + _type = "filesharing" + _icon_dir = "resources/saas/filesharing" + + +class Nextcloud(_Filesharing): + _icon = "nextcloud.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/identity.py b/.venv/Lib/site-packages/diagrams/saas/identity.py new file mode 100644 index 00000000..10eafad1 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/identity.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Identity(_Saas): + _type = "identity" + _icon_dir = "resources/saas/identity" + + +class Auth0(_Identity): + _icon = "auth0.png" + + +class Okta(_Identity): + _icon = "okta.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/logging.py b/.venv/Lib/site-packages/diagrams/saas/logging.py new file mode 100644 index 00000000..fd8a28cd --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/logging.py @@ -0,0 +1,26 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Logging(_Saas): + _type = "logging" + _icon_dir = "resources/saas/logging" + + +class Datadog(_Logging): + _icon = "datadog.png" + + +class Newrelic(_Logging): + _icon = "newrelic.png" + + +class Papertrail(_Logging): + _icon = "papertrail.png" + + +# Aliases + +DataDog = Datadog +NewRelic = Newrelic diff --git a/.venv/Lib/site-packages/diagrams/saas/media.py b/.venv/Lib/site-packages/diagrams/saas/media.py new file mode 100644 index 00000000..f10ce8da --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/media.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Media(_Saas): + _type = "media" + _icon_dir = "resources/saas/media" + + +class Cloudinary(_Media): + _icon = "cloudinary.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/recommendation.py b/.venv/Lib/site-packages/diagrams/saas/recommendation.py new file mode 100644 index 00000000..07a2dd50 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/recommendation.py @@ -0,0 +1,15 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Recommendation(_Saas): + _type = "recommendation" + _icon_dir = "resources/saas/recommendation" + + +class Recombee(_Recommendation): + _icon = "recombee.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/security.py b/.venv/Lib/site-packages/diagrams/saas/security.py new file mode 100644 index 00000000..ff844f84 --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/security.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . 
import _Saas + + +class _Security(_Saas): + _type = "security" + _icon_dir = "resources/saas/security" + + +class Crowdstrike(_Security): + _icon = "crowdstrike.png" + + +class Sonarqube(_Security): + _icon = "sonarqube.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/diagrams/saas/social.py b/.venv/Lib/site-packages/diagrams/saas/social.py new file mode 100644 index 00000000..6ad326db --- /dev/null +++ b/.venv/Lib/site-packages/diagrams/saas/social.py @@ -0,0 +1,19 @@ +# This module is automatically generated by autogen.sh. DO NOT EDIT. + +from . import _Saas + + +class _Social(_Saas): + _type = "social" + _icon_dir = "resources/saas/social" + + +class Facebook(_Social): + _icon = "facebook.png" + + +class Twitter(_Social): + _icon = "twitter.png" + + +# Aliases diff --git a/.venv/Lib/site-packages/distlib-0.3.9.dist-info/INSTALLER b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/distlib-0.3.9.dist-info/LICENSE.txt b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/LICENSE.txt new file mode 100644 index 00000000..c31ac56d --- /dev/null +++ b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/LICENSE.txt @@ -0,0 +1,284 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? 
(1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.2 2.1.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2.1 2.2 2002 PSF yes + 2.2.2 2.2.1 2002 PSF yes + 2.2.3 2.2.2 2003 PSF yes + 2.3 2.2.2 2002-2003 PSF yes + 2.3.1 2.3 2002-2003 PSF yes + 2.3.2 2.3.1 2002-2003 PSF yes + 2.3.3 2.3.2 2002-2003 PSF yes + 2.3.4 2.3.3 2004 PSF yes + 2.3.5 2.3.4 2005 PSF yes + 2.4 2.3 2004 PSF yes + 2.4.1 2.4 2005 PSF yes + 2.4.2 2.4.1 2005 PSF yes + 2.4.3 2.4.2 2006 PSF yes + 2.4.4 2.4.3 2006 PSF yes + 2.5 2.4 2006 PSF yes + 2.5.1 2.5 2007 PSF yes + 2.5.2 2.5.1 2008 PSF yes + 2.5.3 2.5.2 2008 PSF yes + 2.6 2.5 2008 PSF yes + 2.6.1 2.6 2008 PSF yes + 2.6.2 2.6.1 2009 PSF yes + 2.6.3 2.6.2 2009 PSF yes + 2.6.4 2.6.3 2009 PSF yes + 2.6.5 2.6.4 2010 PSF yes + 3.0 2.6 2008 PSF yes + 3.0.1 3.0 2009 PSF yes + 3.1 3.0.1 2009 PSF yes + 3.1.1 3.1 2009 PSF yes + 3.1.2 3.1 2010 PSF yes + 3.2 3.1 2010 PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 +Python Software Foundation; All Rights Reserved" are retained in Python alone or +in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. 
By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. 
Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/.venv/Lib/site-packages/distlib-0.3.9.dist-info/METADATA b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/METADATA new file mode 100644 index 00000000..57304b8f --- /dev/null +++ b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/METADATA @@ -0,0 +1,118 @@ +Metadata-Version: 2.1 +Name: distlib +Version: 0.3.9 +Summary: Distribution utilities +Home-page: https://github.com/pypa/distlib +Author: Vinay Sajip +Author-email: vinay_sajip@red-dove.com +License: PSF-2.0 +Project-URL: Documentation, https://distlib.readthedocs.io/ +Project-URL: Source, https://github.com/pypa/distlib +Project-URL: Tracker, https://github.com/pypa/distlib/issues +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Software Development +License-File: LICENSE.txt + +|badge1| |badge2| + +.. |badge1| image:: https://img.shields.io/github/actions/workflow/status/pypa/distlib/package-tests.yml + :alt: GitHub Workflow Status (with event) + +.. 
|badge2| image:: https://img.shields.io/codecov/c/github/pypa/distlib
+ :target: https://app.codecov.io/gh/pypa/distlib
+ :alt: GitHub coverage status
+
+What is it?
+-----------
+
+Distlib is a library which implements low-level functions that relate to
+packaging and distribution of Python software. It is intended to be used as the
+basis for third-party packaging tools. The documentation is available at
+
+https://distlib.readthedocs.io/
+
+Main features
+-------------
+
+Distlib currently offers the following features:
+
+* The package ``distlib.database``, which implements a database of installed
+ distributions, as defined by :pep:`376`, and distribution dependency graph
+ logic. Support is also provided for non-installed distributions (i.e.
+ distributions registered with metadata on an index like PyPI), including
+ the ability to scan for dependencies and building dependency graphs.
+* The package ``distlib.index``, which implements an interface to perform
+ operations on an index, such as registering a project, uploading a
+ distribution or uploading documentation. Support is included for verifying
+ SSL connections (with domain matching) and signing/verifying packages using
+ GnuPG.
+* The package ``distlib.metadata``, which implements distribution metadata as
+ defined by :pep:`643`, :pep:`566`, :pep:`345`, :pep:`314` and :pep:`241`.
+* The package ``distlib.markers``, which implements environment markers as
+ defined by :pep:`508`.
+* The package ``distlib.manifest``, which implements lists of files used
+ in packaging source distributions.
+* The package ``distlib.locators``, which allows finding distributions, whether
+ on PyPI (XML-RPC or via the "simple" interface), local directories or some
+ other source.
+* The package ``distlib.resources``, which allows access to data files stored
+ in Python packages, both in the file system and in .zip files.
+* The package ``distlib.scripts``, which allows installing of scripts with
+ adjustment of shebang lines and support for native Windows executable
+ launchers.
+* The package ``distlib.version``, which implements version specifiers as
+ defined by :pep:`440`, but also support for working with "legacy" versions and
+ semantic versions (a short usage sketch follows the Project status section below).
+* The package ``distlib.wheel``, which provides support for building and
+ installing from the Wheel format for binary distributions (see :pep:`427`).
+* The package ``distlib.util``, which contains miscellaneous functions and
+ classes which are useful in packaging, but which do not fit neatly into
+ one of the other packages in ``distlib``. The package also implements enhanced
+ globbing functionality such as the ability to use ``**`` in patterns to
+ specify recursing into subdirectories.
+
+
+Python version and platform compatibility
+-----------------------------------------
+
+Distlib is intended to be used on, and is tested on, Python versions 2.7 and 3.6 or later,
+pypy-2.7 and pypy3 on Linux, Windows, and macOS.
+
+Project status
+--------------
+
+The project has reached a mature status in its development: there is a comprehensive
+test suite and it has been exercised on Windows, Ubuntu and macOS. The project is used
+by well-known projects such as `pip `_ and `caniusepython3
+`_.
+
+This project was migrated from Mercurial to Git and from BitBucket to GitHub, and
+although all information of importance has been retained across the migration, some
+commit references in issues and issue comments may have become invalid.
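A brief usage sketch of the features listed above, mirroring the ``get_scheme``/``matcher`` and ``DistributionPath`` calls that ``distlib/database.py`` itself makes later in this diff; the distribution name ``requests``, the constraint string and the expected output are illustrative assumptions rather than anything required by distlib::

    from distlib.database import DistributionPath
    from distlib.version import get_scheme

    # Match candidate versions against a PEP 440-style constraint, using the
    # same "name (specifiers)" form that DistributionPath builds internally.
    matcher = get_scheme('default').matcher('requests (>=2.25, <3.0)')
    for candidate in ('2.24.0', '2.31.0'):
        # expected: False for 2.24.0 (fails >=2.25), True for 2.31.0
        print(candidate, matcher.match(candidate))

    # Enumerate distributions installed on sys.path (including legacy .egg-info).
    dp = DistributionPath(include_egg=True)
    dist = dp.get_distribution('distlib')
    if dist is not None:
        print(dist.name_and_version)

The same ``matcher.match`` call underpins ``Distribution.matches_requirement`` and ``DistributionPath.provides_distribution`` in ``distlib/database.py`` further down in this diff.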
+ +Code of Conduct +--------------- + +Everyone interacting in the distlib project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/.venv/Lib/site-packages/distlib-0.3.9.dist-info/RECORD b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/RECORD new file mode 100644 index 00000000..9c10a6f0 --- /dev/null +++ b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/RECORD @@ -0,0 +1,38 @@ +distlib-0.3.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +distlib-0.3.9.dist-info/LICENSE.txt,sha256=gI4QyKarjesUn_mz-xn0R6gICUYG1xKpylf-rTVSWZ0,14531 +distlib-0.3.9.dist-info/METADATA,sha256=GfbwA0gg2QzJ2oWxlg-OHyPG1Q_wWM2NzEtWg-EfxDc,5246 +distlib-0.3.9.dist-info/RECORD,, +distlib-0.3.9.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +distlib-0.3.9.dist-info/top_level.txt,sha256=9BERqitu_vzyeyILOcGzX9YyA2AB_xlC4-81V6xoizk,8 +distlib/__init__.py,sha256=dcwgYGYGQqAEawBXPDtIx80DO_3cOmFv8HTc8JMzknQ,625 +distlib/__pycache__/__init__.cpython-312.pyc,, +distlib/__pycache__/compat.cpython-312.pyc,, +distlib/__pycache__/database.cpython-312.pyc,, +distlib/__pycache__/index.cpython-312.pyc,, +distlib/__pycache__/locators.cpython-312.pyc,, +distlib/__pycache__/manifest.cpython-312.pyc,, +distlib/__pycache__/markers.cpython-312.pyc,, +distlib/__pycache__/metadata.cpython-312.pyc,, +distlib/__pycache__/resources.cpython-312.pyc,, +distlib/__pycache__/scripts.cpython-312.pyc,, +distlib/__pycache__/util.cpython-312.pyc,, +distlib/__pycache__/version.cpython-312.pyc,, +distlib/__pycache__/wheel.cpython-312.pyc,, +distlib/compat.py,sha256=2jRSjRI4o-vlXeTK2BCGIUhkc6e9ZGhSsacRM5oseTw,41467 +distlib/database.py,sha256=mHy_LxiXIsIVRb-T0-idBrVLw3Ffij5teHCpbjmJ9YU,51160 +distlib/index.py,sha256=lTbw268rRhj8dw1sib3VZ_0EhSGgoJO3FKJzSFMOaeA,20797 +distlib/locators.py,sha256=oBeAZpFuPQSY09MgNnLfQGGAXXvVO96BFpZyKMuK4tM,51026 +distlib/manifest.py,sha256=3qfmAmVwxRqU1o23AlfXrQGZzh6g_GGzTAP_Hb9C5zQ,14168 +distlib/markers.py,sha256=X6sDvkFGcYS8gUW8hfsWuKEKAqhQZAJ7iXOMLxRYjYk,5164 +distlib/metadata.py,sha256=zil3sg2EUfLXVigljY2d_03IJt-JSs7nX-73fECMX2s,38724 +distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820 +distlib/scripts.py,sha256=BJliaDAZaVB7WAkwokgC3HXwLD2iWiHaVI50H7C6eG8,18608 +distlib/t32.exe,sha256=a0GV5kCoWsMutvliiCKmIgV98eRZ33wXoS-XrqvJQVs,97792 +distlib/t64-arm.exe,sha256=68TAa32V504xVBnufojh0PcenpR3U4wAqTqf-MZqbPw,182784 +distlib/t64.exe,sha256=gaYY8hy4fbkHYTTnA4i26ct8IQZzkBG2pRdy0iyuBrc,108032 +distlib/util.py,sha256=vMPGvsS4j9hF6Y9k3Tyom1aaHLb0rFmZAEyzeAdel9w,66682 +distlib/version.py,sha256=s5VIs8wBn0fxzGxWM_aA2ZZyx525HcZbMvcTlTyZ3Rg,23727 +distlib/w32.exe,sha256=R4csx3-OGM9kL4aPIzQKRo5TfmRSHZo6QWyLhDhNBks,91648 +distlib/w64-arm.exe,sha256=xdyYhKj0WDcVUOCb05blQYvzdYIKMbmJn2SZvzkcey4,168448 +distlib/w64.exe,sha256=ejGf-rojoBfXseGLpya6bFTFPWRG21X5KvU8J5iU-K0,101888 +distlib/wheel.py,sha256=DFIVguEQHCdxnSdAO0dfFsgMcvVZitg7bCOuLwZ7A_s,43979 diff --git a/.venv/Lib/site-packages/distlib-0.3.9.dist-info/WHEEL b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/WHEEL new file mode 100644 index 00000000..0b18a281 --- /dev/null +++ b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/distlib-0.3.9.dist-info/top_level.txt 
b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/top_level.txt new file mode 100644 index 00000000..f68bb072 --- /dev/null +++ b/.venv/Lib/site-packages/distlib-0.3.9.dist-info/top_level.txt @@ -0,0 +1 @@ +distlib diff --git a/.venv/Lib/site-packages/distlib/__init__.py b/.venv/Lib/site-packages/distlib/__init__.py new file mode 100644 index 00000000..bf0d6c6d --- /dev/null +++ b/.venv/Lib/site-packages/distlib/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import logging + +__version__ = '0.3.9' + + +class DistlibException(Exception): + pass + + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + + class NullHandler(logging.Handler): + + def handle(self, record): + pass + + def emit(self, record): + pass + + def createLock(self): + self.lock = None + + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/.venv/Lib/site-packages/distlib/compat.py b/.venv/Lib/site-packages/distlib/compat.py new file mode 100644 index 00000000..ca561dd2 --- /dev/null +++ b/.venv/Lib/site-packages/distlib/compat.py @@ -0,0 +1,1137 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import absolute_import + +import os +import re +import shutil +import sys + +try: + import ssl +except ImportError: # pragma: no cover + ssl = None + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) + if ssl: + from urllib2 import HTTPSHandler + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + # Leaving this around for now, in case it needs resurrecting in some way + # _userprog = None + # def splituser(host): + # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + # global _userprog + # if _userprog is None: + # import re + # _userprog = re.compile('^(.*)@(.*)$') + + # match = _userprog.match(host) + # if match: return match.group(1, 2) + # return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + from urllib.parse import (urlparse, urlunparse, urljoin, quote, unquote, + urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, HTTPBasicAuthHandler, + HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) + if ssl: + from urllib.request import HTTPSHandler + from 
urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib + import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + + class CertificateError(ValueError): + pass + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
+ if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" % + (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" % + (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. ./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if os.curdir not in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. 
+ files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if normdir not in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: # pragma: no cover + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + + +import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections.abc import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + # Issue #99: on some systems (e.g. containerised), + # sys.getfilesystemencoding() returns None, and we need a real value, + # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and + # sys.getfilesystemencoding(): the return value is "the user’s preference + # according to the result of nl_langinfo(CODESET), or None if the + # nl_langinfo(CODESET) failed." + _fsencoding = sys.getfilesystemencoding() or 'utf-8' + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + + cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. 
It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. + line_string = line.decode('utf-8') + except UnicodeDecodeError: + msg = "invalid or missing encoding declaration" + if filename is not None: + msg = '{} for {!r}'.format(msg, filename) + raise SyntaxError(msg) + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + if filename is None: + msg = "unknown encoding: " + encoding + else: + msg = "unknown encoding for {!r}: {}".format( + filename, encoding) + raise SyntaxError(msg) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + if filename is None: + msg = 'encoding problem: utf-8' + else: + msg = 'encoding problem for {!r}: utf-8'.format( + filename) + raise SyntaxError(msg) + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + + +# For converting & <-> & etc. 
+try: + from html import escape +except ImportError: + from cgi import escape +if sys.version_info[:2] < (3, 4): + unescape = HTMLParser().unescape +else: + from html import unescape + +try: + from collections import ChainMap +except ImportError: # pragma: no cover + from collections import MutableMapping + + try: + from reprlib import recursive_repr as _recursive_repr + except ImportError: + + def _recursive_repr(fillvalue='...'): + ''' + Decorator to make a repr function return fillvalue for a recursive + call + ''' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, + '__annotations__', {}) + return wrapper + + return decorating_function + + class ChainMap(MutableMapping): + ''' + A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. + + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[ + key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__( + key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union( + *self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' 
+ return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +try: + from importlib.util import cache_from_source # Python >= 3.4 +except ImportError: # pragma: no cover + + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix + + +try: + from collections import OrderedDict +except ImportError: # pragma: no cover + # {{{ http://code.activestate.com/recipes/576693/ (r9) + # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. + # Passes Python2.7's test suite and incorporates all the latest updates. + try: + from thread import get_ident as _get_ident + except ImportError: + from dummy_thread import get_ident as _get_ident + + try: + from _abcoll import KeysView, ValuesView, ItemsView + except ImportError: + pass + + class OrderedDict(dict): + 'Dictionary that remembers insertion order' + + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. + + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % + len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. 
+ dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. + + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args), )) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. 
+ + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: + _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__, ) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items, ), inst_dict) + return self.__class__, (items, ) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return len(self) == len( + other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. 
+
+ class ConvertingDict(dict):
+ """A converting dictionary wrapper."""
+
+ def __getitem__(self, key):
+ value = dict.__getitem__(self, key)
+ result = self.configurator.convert(value)
+ # If the converted value is different, save for next time
+ if value is not result:
+ self[key] = result
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ def get(self, key, default=None):
+ value = dict.get(self, key, default)
+ result = self.configurator.convert(value)
+ # If the converted value is different, save for next time
+ if value is not result:
+ self[key] = result
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ def pop(self, key, default=None):
+ value = dict.pop(self, key, default)
+ result = self.configurator.convert(value)
+ if value is not result:
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ class ConvertingList(list):
+ """A converting list wrapper."""
+
+ def __getitem__(self, key):
+ value = list.__getitem__(self, key)
+ result = self.configurator.convert(value)
+ # If the converted value is different, save for next time
+ if value is not result:
+ self[key] = result
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ def pop(self, idx=-1):
+ value = list.pop(self, idx)
+ result = self.configurator.convert(value)
+ if value is not result:
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ return result
+
+ class ConvertingTuple(tuple):
+ """A converting tuple wrapper."""
+
+ def __getitem__(self, key):
+ value = tuple.__getitem__(self, key)
+ result = self.configurator.convert(value)
+ if value is not result:
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ class BaseConfigurator(object):
+ """
+ The configurator base class which defines some useful defaults.
+ """
+
+ CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
+
+ WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
+ DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
+ INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
+ DIGIT_PATTERN = re.compile(r'^\d+$')
+
+ value_converters = {
+ 'ext': 'ext_convert',
+ 'cfg': 'cfg_convert',
+ }
+
+ # We might want to use a different one, e.g. importlib
+ importer = staticmethod(__import__)
+
+ def __init__(self, config):
+ self.config = ConvertingDict(config)
+ self.config.configurator = self
+
+ def resolve(self, s):
+ """
+ Resolve strings to objects using standard import and attribute
+ syntax.
+ """
+ name = s.split('.')
+ used = name.pop(0)
+ try:
+ found = self.importer(used)
+ for frag in name:
+ used += '.'
+ frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int( + idx + ) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + # rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. + """ + if not isinstance(value, ConvertingDict) and isinstance( + value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance( + value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/.venv/Lib/site-packages/distlib/database.py b/.venv/Lib/site-packages/distlib/database.py new file mode 100644 index 00000000..c0f896a7 --- /dev/null +++ b/.venv/Lib/site-packages/distlib/database.py @@ -0,0 +1,1329 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""PEP 376 implementation.""" + +from __future__ import unicode_literals + +import base64 +import codecs +import contextlib +import hashlib +import logging +import os +import posixpath +import sys +import zipimport + +from . 
import DistlibException, resources +from .compat import StringIO +from .version import get_scheme, UnsupportedVersionError +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME) +from .util import (parse_requirement, cached_property, parse_name_and_version, read_exports, write_exports, CSVReader, + CSVWriter) + +__all__ = [ + 'Distribution', 'BaseInstalledDistribution', 'InstalledDistribution', 'EggInfoDistribution', 'DistributionPath' +] + +logger = logging.getLogger(__name__) + +EXPORTS_FILENAME = 'pydist-exports.json' +COMMANDS_FILENAME = 'pydist-commands.json' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', 'RESOURCES', EXPORTS_FILENAME, 'SHARED') + +DISTINFO_EXT = '.dist-info' + + +class _Cache(object): + """ + A simple cache mapping names and .dist-info paths to distributions + """ + + def __init__(self): + """ + Initialise an instance. There is normally one for each DistributionPath. + """ + self.name = {} + self.path = {} + self.generated = False + + def clear(self): + """ + Clear the cache, setting it to its initial state. + """ + self.name.clear() + self.path.clear() + self.generated = False + + def add(self, dist): + """ + Add a distribution to the cache. + :param dist: The distribution to add. + """ + if dist.path not in self.path: + self.path[dist.path] = dist + self.name.setdefault(dist.key, []).append(dist) + + +class DistributionPath(object): + """ + Represents a set of distributions installed on a path (typically sys.path). + """ + + def __init__(self, path=None, include_egg=False): + """ + Create an instance from a path, optionally including legacy (distutils/ + setuptools/distribute) distributions. + :param path: The path to use, as a list of directories. If not specified, + sys.path is used. + :param include_egg: If True, this instance will look for and return legacy + distributions as well as those based on PEP 376. + """ + if path is None: + path = sys.path + self.path = path + self._include_dist = True + self._include_egg = include_egg + + self._cache = _Cache() + self._cache_egg = _Cache() + self._cache_enabled = True + self._scheme = get_scheme('default') + + def _get_cache_enabled(self): + return self._cache_enabled + + def _set_cache_enabled(self, value): + self._cache_enabled = value + + cache_enabled = property(_get_cache_enabled, _set_cache_enabled) + + def clear_cache(self): + """ + Clears the internal cache. + """ + self._cache.clear() + self._cache_egg.clear() + + def _yield_distributions(self): + """ + Yield .dist-info and/or .egg(-info) distributions. + """ + # We need to check if we've seen some resources already, because on + # some Linux systems (e.g. some Debian/Ubuntu variants) there are + # symlinks which alias other files in the environment. 
+ seen = set() + for path in self.path: + finder = resources.finder_for_path(path) + if finder is None: + continue + r = finder.find('') + if not r or not r.is_container: + continue + rset = sorted(r.resources) + for entry in rset: + r = finder.find(entry) + if not r or r.path in seen: + continue + try: + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join(entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue + + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, metadata=metadata, env=self) + elif self._include_egg and entry.endswith(('.egg-info', '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + except Exception as e: + msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s' + logger.warning(msg, r.path, e) + import warnings + warnings.warn(msg % (r.path, e), stacklevel=2) + + def _generate_cache(self): + """ + Scan the path for distributions and populate the cache with + those that are found. + """ + gen_dist = not self._cache.generated + gen_egg = self._include_egg and not self._cache_egg.generated + if gen_dist or gen_egg: + for dist in self._yield_distributions(): + if isinstance(dist, InstalledDistribution): + self._cache.add(dist) + else: + self._cache_egg.add(dist) + + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True + + @classmethod + def distinfo_dirname(cls, name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. + :type name: string + :parameter version: is converted to a standard version string. Spaces + become dots, and all other non-alphanumeric characters + (except dots) become dashes, with runs of multiple + dashes condensed to a single dash. + :type version: string + :returns: directory name + :rtype: string""" + name = name.replace('-', '_') + return '-'.join([name, version]) + DISTINFO_EXT + + def get_distributions(self): + """ + Provides an iterator that looks for distributions and returns + :class:`InstalledDistribution` or + :class:`EggInfoDistribution` instances for each one of them. + + :rtype: iterator of :class:`InstalledDistribution` and + :class:`EggInfoDistribution` instances + """ + if not self._cache_enabled: + for dist in self._yield_distributions(): + yield dist + else: + self._generate_cache() + + for dist in self._cache.path.values(): + yield dist + + if self._include_egg: + for dist in self._cache_egg.path.values(): + yield dist + + def get_distribution(self, name): + """ + Looks for a named distribution on the path. + + This function only returns the first result found, as no more than one + value is expected. If nothing is found, ``None`` is returned. 
+ + :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` + or ``None`` + """ + result = None + name = name.lower() + if not self._cache_enabled: + for dist in self._yield_distributions(): + if dist.key == name: + result = dist + break + else: + self._generate_cache() + + if name in self._cache.name: + result = self._cache.name[name][0] + elif self._include_egg and name in self._cache_egg.name: + result = self._cache_egg.name[name][0] + return result + + def provides_distribution(self, name, version=None): + """ + Iterates over all distributions to find which distributions provide *name*. + If a *version* is provided, it will be used to filter the results. + + This function only returns the first result found, since no more than + one values are expected. If the directory is not found, returns ``None``. + + :parameter version: a version specifier that indicates the version + required, conforming to the format in ``PEP-345`` + + :type name: string + :type version: string + """ + matcher = None + if version is not None: + try: + matcher = self._scheme.matcher('%s (%s)' % (name, version)) + except ValueError: + raise DistlibException('invalid name or version: %r, %r' % (name, version)) + + for dist in self.get_distributions(): + # We hit a problem on Travis where enum34 was installed and doesn't + # have a provides attribute ... + if not hasattr(dist, 'provides'): + logger.debug('No "provides": %s', dist) + else: + provided = dist.provides + + for p in provided: + p_name, p_ver = parse_name_and_version(p) + if matcher is None: + if p_name == name: + yield dist + break + else: + if p_name == name and matcher.match(p_ver): + yield dist + break + + def get_file_path(self, name, relative_path): + """ + Return the path to a resource file. + """ + dist = self.get_distribution(name) + if dist is None: + raise LookupError('no distribution named %r found' % name) + return dist.get_resource_path(relative_path) + + def get_exported_entries(self, category, name=None): + """ + Return all of the exported entries in a particular category. + + :param category: The category to search for entries. + :param name: If specified, only entries with that name are returned. + """ + for dist in self.get_distributions(): + r = dist.exports + if category in r: + d = r[category] + if name is not None: + if name in d: + yield d[name] + else: + for v in d.values(): + yield v + + +class Distribution(object): + """ + A base class for distributions, whether installed or from indexes. + Either way, it must have some metadata, so that's all that's needed + for construction. + """ + + build_time_dependency = False + """ + Set to True if it's known to be only a build-time dependency (i.e. + not needed after installation). + """ + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, metadata): + """ + Initialise an instance. + :param metadata: The instance of :class:`Metadata` describing this + distribution. 
+ """
+ self.metadata = metadata
+ self.name = metadata.name
+ self.key = self.name.lower() # for case-insensitive comparisons
+ self.version = metadata.version
+ self.locator = None
+ self.digest = None
+ self.extras = None # additional features requested
+ self.context = None # environment marker overrides
+ self.download_urls = set()
+ self.digests = {}
+
+ @property
+ def source_url(self):
+ """
+ The source archive download URL for this distribution.
+ """
+ return self.metadata.source_url
+
+ download_url = source_url # Backward compatibility
+
+ @property
+ def name_and_version(self):
+ """
+ A utility property which displays the name and version in parentheses.
+ """
+ return '%s (%s)' % (self.name, self.version)
+
+ @property
+ def provides(self):
+ """
+ A set of distribution names and versions provided by this distribution.
+ :return: A set of "name (version)" strings.
+ """
+ plist = self.metadata.provides
+ s = '%s (%s)' % (self.name, self.version)
+ if s not in plist:
+ plist.append(s)
+ return plist
+
+ def _get_requirements(self, req_attr):
+ md = self.metadata
+ reqts = getattr(md, req_attr)
+ logger.debug('%s: got requirements %r from metadata: %r', self.name, req_attr, reqts)
+ return set(md.get_requirements(reqts, extras=self.extras, env=self.context))
+
+ @property
+ def run_requires(self):
+ return self._get_requirements('run_requires')
+
+ @property
+ def meta_requires(self):
+ return self._get_requirements('meta_requires')
+
+ @property
+ def build_requires(self):
+ return self._get_requirements('build_requires')
+
+ @property
+ def test_requires(self):
+ return self._get_requirements('test_requires')
+
+ @property
+ def dev_requires(self):
+ return self._get_requirements('dev_requires')
+
+ def matches_requirement(self, req):
+ """
+ Say if this instance matches (fulfills) a requirement.
+ :param req: The requirement to match.
+ :rtype req: str
+ :return: True if it matches, else False.
+ """
+ # Requirement may contain extras - parse to lose those
+ # from what's passed to the matcher
+ r = parse_requirement(req)
+ scheme = get_scheme(self.metadata.scheme)
+ try:
+ matcher = scheme.matcher(r.requirement)
+ except UnsupportedVersionError:
+ # XXX compat-mode if cannot read the version
+ logger.warning('could not read version %r - using name only', req)
+ name = req.split()[0]
+ matcher = scheme.matcher(name)
+
+ name = matcher.key # case-insensitive
+
+ result = False
+ for p in self.provides:
+ p_name, p_ver = parse_name_and_version(p)
+ if p_name != name:
+ continue
+ try:
+ result = matcher.match(p_ver)
+ break
+ except UnsupportedVersionError:
+ pass
+ return result
+
+ def __repr__(self):
+ """
+ Return a textual representation of this instance,
+ """
+ if self.source_url:
+ suffix = ' [%s]' % self.source_url
+ else:
+ suffix = ''
+ return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)
+
+ def __eq__(self, other):
+ """
+ See if this distribution is the same as another.
+ :param other: The distribution to compare with. To be equal to one
+ another. distributions must have the same type, name,
+ version and source_url.
+ :return: True if it is the same, else False.
+ """
+ if type(other) is not type(self):
+ result = False
+ else:
+ result = (self.name == other.name and self.version == other.version and self.source_url == other.source_url)
+ return result
+
+ def __hash__(self):
+ """
+ Compute hash in a way which matches the equality test.
+ """ + return hash(self.name) + hash(self.version) + hash(self.source_url) + + +class BaseInstalledDistribution(Distribution): + """ + This is the base class for installed distributions (whether PEP 376 or + legacy). + """ + + hasher = None + + def __init__(self, metadata, path, env=None): + """ + Initialise an instance. + :param metadata: An instance of :class:`Metadata` which describes the + distribution. This will normally have been initialised + from a metadata file in the ``path``. + :param path: The path of the ``.dist-info`` or ``.egg-info`` + directory for the distribution. + :param env: This is normally the :class:`DistributionPath` + instance where this distribution was found. + """ + super(BaseInstalledDistribution, self).__init__(metadata) + self.path = path + self.dist_path = env + + def get_hash(self, data, hasher=None): + """ + Get the hash of some data, using a particular hash algorithm, if + specified. + + :param data: The data to be hashed. + :type data: bytes + :param hasher: The name of a hash implementation, supported by hashlib, + or ``None``. Examples of valid values are ``'sha1'``, + ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and + ``'sha512'``. If no hasher is specified, the ``hasher`` + attribute of the :class:`InstalledDistribution` instance + is used. If the hasher is determined to be ``None``, MD5 + is used as the hashing algorithm. + :returns: The hash of the data. If a hasher was explicitly specified, + the returned hash will be prefixed with the specified hasher + followed by '='. + :rtype: str + """ + if hasher is None: + hasher = self.hasher + if hasher is None: + hasher = hashlib.md5 + prefix = '' + else: + hasher = getattr(hashlib, hasher) + prefix = '%s=' % self.hasher + digest = hasher(data).digest() + digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') + return '%s%s' % (prefix, digest) + + +class InstalledDistribution(BaseInstalledDistribution): + """ + Created with the *path* of the ``.dist-info`` directory provided to the + constructor. It reads the metadata contained in ``pydist.json`` when it is + instantiated., or uses a passed in Metadata instance (useful for when + dry-run mode is being used). 
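+
+    A rough usage sketch (the path below is hypothetical)::
+
+        dist = InstalledDistribution('/path/to/site-packages/foo-1.0.dist-info')
+        print(dist.name, dist.version)
+        mismatches = dist.check_installed_files()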
+ """ + + hasher = 'sha256' + + def __init__(self, path, metadata=None, env=None): + self.modules = [] + self.finder = finder = resources.finder_for_path(path) + if finder is None: + raise ValueError('finder unavailable for %s' % path) + if env and env._cache_enabled and path in env._cache.path: + metadata = env._cache.path[path].metadata + elif metadata is None: + r = finder.find(METADATA_FILENAME) + # Temporary - for Wheel 0.23 support + if r is None: + r = finder.find(WHEEL_METADATA_FILENAME) + # Temporary - for legacy support + if r is None: + r = finder.find(LEGACY_METADATA_FILENAME) + if r is None: + raise ValueError('no %s found in %s' % (METADATA_FILENAME, path)) + with contextlib.closing(r.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + + super(InstalledDistribution, self).__init__(metadata, path, env) + + if env and env._cache_enabled: + env._cache.add(self) + + r = finder.find('REQUESTED') + self.requested = r is not None + p = os.path.join(path, 'top_level.txt') + if os.path.exists(p): + with open(p, 'rb') as f: + data = f.read().decode('utf-8') + self.modules = data.splitlines() + + def __repr__(self): + return '' % (self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def _get_records(self): + """ + Get the list of installed files for the distribution + :return: A list of tuples of path, hash and size. Note that hash and + size might be ``None`` for some entries. The path is exactly + as stored in the file (which is as in PEP 376). + """ + results = [] + r = self.get_distinfo_resource('RECORD') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as record_reader: + # Base location is parent dir of .dist-info dir + # base_location = os.path.dirname(self.path) + # base_location = os.path.abspath(base_location) + for row in record_reader: + missing = [None for i in range(len(row), 3)] + path, checksum, size = row + missing + # if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) + results.append((path, checksum, size)) + return results + + @cached_property + def exports(self): + """ + Return the information exported by this distribution. + :return: A dictionary of exports, mapping an export category to a dict + of :class:`ExportEntry` instances describing the individual + export entries, and keyed by name. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + result = self.read_exports() + return result + + def read_exports(self): + """ + Read exports data from a file in .ini format. + + :return: A dictionary of exports, mapping an export category to a list + of :class:`ExportEntry` instances describing the individual + export entries. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + with contextlib.closing(r.as_stream()) as stream: + result = read_exports(stream) + return result + + def write_exports(self, exports): + """ + Write a dictionary of exports to a file in .ini format. + :param exports: A dictionary of exports, mapping an export category to + a list of :class:`ExportEntry` instances describing the + individual export entries. + """ + rf = self.get_distinfo_file(EXPORTS_FILENAME) + with open(rf, 'w') as f: + write_exports(exports, f) + + def get_resource_path(self, relative_path): + """ + NOTE: This API may change in the future. + + Return the absolute path to a resource file with the given relative + path. 
+ + :param relative_path: The path, relative to .dist-info, of the resource + of interest. + :return: The absolute path where the resource is to be found. + """ + r = self.get_distinfo_resource('RESOURCES') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as resources_reader: + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError('no resource file with relative path %r ' + 'is installed' % relative_path) + + def list_installed_files(self): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: iterator of (path, hash, size) + """ + for result in self._get_records(): + yield result + + def write_installed_files(self, paths, prefix, dry_run=False): + """ + Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any + existing ``RECORD`` file is silently overwritten. + + prefix is used to determine when to write absolute paths. + """ + prefix = os.path.join(prefix, '') + base = os.path.dirname(self.path) + base_under_prefix = base.startswith(prefix) + base = os.path.join(base, '') + record_path = self.get_distinfo_file('RECORD') + logger.info('creating %s', record_path) + if dry_run: + return None + with CSVWriter(record_path) as writer: + for path in paths: + if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): + # do not put size and hash, as in PEP-376 + hash_value = size = '' + else: + size = '%d' % os.path.getsize(path) + with open(path, 'rb') as fp: + hash_value = self.get_hash(fp.read()) + if path.startswith(base) or (base_under_prefix and path.startswith(prefix)): + path = os.path.relpath(path, base) + writer.writerow((path, hash_value, size)) + + # add the RECORD file itself + if record_path.startswith(base): + record_path = os.path.relpath(record_path, base) + writer.writerow((record_path, '', '')) + return record_path + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + base = os.path.dirname(self.path) + record_path = self.get_distinfo_file('RECORD') + for path, hash_value, size in self.list_installed_files(): + if not os.path.isabs(path): + path = os.path.join(base, path) + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + elif os.path.isfile(path): + actual_size = str(os.path.getsize(path)) + if size and actual_size != size: + mismatches.append((path, 'size', size, actual_size)) + elif hash_value: + if '=' in hash_value: + hasher = hash_value.split('=', 1)[0] + else: + hasher = None + + with open(path, 'rb') as f: + actual_hash = self.get_hash(f.read(), hasher) + if actual_hash != hash_value: + mismatches.append((path, 'hash', hash_value, actual_hash)) + return mismatches + + @cached_property + def shared_locations(self): + """ + A dictionary of shared locations whose keys are in the set 'prefix', + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. + The corresponding value is the absolute path of that category for + this distribution, and takes into account any paths selected by the + user at installation time (e.g. 
via command-line arguments). In the + case of the 'namespace' key, this would be a list of absolute paths + for the roots of namespace packages in this distribution. + + The first time this property is accessed, the relevant information is + read from the SHARED file in the .dist-info directory. + """ + result = {} + shared_path = os.path.join(self.path, 'SHARED') + if os.path.isfile(shared_path): + with codecs.open(shared_path, 'r', encoding='utf-8') as f: + lines = f.read().splitlines() + for line in lines: + key, value = line.split('=', 1) + if key == 'namespace': + result.setdefault(key, []).append(value) + else: + result[key] = value + return result + + def write_shared_locations(self, paths, dry_run=False): + """ + Write shared location information to the SHARED file in .dist-info. + :param paths: A dictionary as described in the documentation for + :meth:`shared_locations`. + :param dry_run: If True, the action is logged but no file is actually + written. + :return: The path of the file written to. + """ + shared_path = os.path.join(self.path, 'SHARED') + logger.info('creating %s', shared_path) + if dry_run: + return None + lines = [] + for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): + path = paths[key] + if os.path.isdir(paths[key]): + lines.append('%s=%s' % (key, path)) + for ns in paths.get('namespace', ()): + lines.append('namespace=%s' % ns) + + with codecs.open(shared_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + return shared_path + + def get_distinfo_resource(self, path): + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + finder = resources.finder_for_path(self.path) + if finder is None: + raise DistlibException('Unable to get a finder for %s' % self.path) + return finder.find(path) + + def get_distinfo_file(self, path): + """ + Returns a path located under the ``.dist-info`` directory. Returns a + string representing the path. + + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`DistlibException` is raised + :type path: str + :rtype: str + """ + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? + distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise DistlibException('dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + + return os.path.join(self.path, path) + + def list_distinfo_files(self): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. 
+ + :returns: iterator of paths + """ + base = os.path.dirname(self.path) + for path, checksum, size in self._get_records(): + # XXX add separator or use real relpath algo + if not os.path.isabs(path): + path = os.path.join(base, path) + if path.startswith(self.path): + yield path + + def __eq__(self, other): + return (isinstance(other, InstalledDistribution) and self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(BaseInstalledDistribution): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + requested = True # as we have no way of knowing, assume it was + shared_locations = {} + + def __init__(self, path, env=None): + + def set_name_and_version(s, n, v): + s.name = n + s.key = n.lower() # for case-insensitive comparisons + s.version = v + + self.path = path + self.dist_path = env + if env and env._cache_enabled and path in env._cache_egg.path: + metadata = env._cache_egg.path[path].metadata + set_name_and_version(self, metadata.name, metadata.version) + else: + metadata = self._get_metadata(path) + + # Need to be set before caching + set_name_and_version(self, metadata.name, metadata.version) + + if env and env._cache_enabled: + env._cache_egg.add(self) + super(EggInfoDistribution, self).__init__(metadata, path, env) + + def _get_metadata(self, path): + requires = None + + def parse_requires_data(data): + """Create a list of dependencies from a requires.txt file. + + *data*: the contents of a setuptools-produced requires.txt file. + """ + reqs = [] + lines = data.splitlines() + for line in lines: + line = line.strip() + # sectioned files have bare newlines (separating sections) + if not line: # pragma: no cover + continue + if line.startswith('['): # pragma: no cover + logger.warning('Unexpected line: quitting requirement scan: %r', line) + break + r = parse_requirement(line) + if not r: # pragma: no cover + logger.warning('Not recognised as a requirement: %r', line) + continue + if r.extras: # pragma: no cover + logger.warning('extra requirements in requires.txt are ' + 'not supported') + if not r.constraints: + reqs.append(r.name) + else: + cons = ', '.join('%s%s' % c for c in r.constraints) + reqs.append('%s (%s)' % (r.name, cons)) + return reqs + + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. + + *req_path*: the path to a setuptools-produced requires.txt file. 
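+
+            Behaviour sketch (approximate, not a verbatim doctest): a line
+            such as ``foo>=1.0`` is returned as ``'foo (>=1.0)'`` and a bare
+            line ``bar`` as ``'bar'``; a ``[...]`` section header stops the
+            scan.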
+ """ + + reqs = [] + try: + with codecs.open(req_path, 'r', 'utf-8') as fp: + reqs = parse_requires_data(fp.read()) + except IOError: + pass + return reqs + + tl_path = tl_data = None + if path.endswith('.egg'): + if os.path.isdir(path): + p = os.path.join(path, 'EGG-INFO') + meta_path = os.path.join(p, 'PKG-INFO') + metadata = Metadata(path=meta_path, scheme='legacy') + req_path = os.path.join(p, 'requires.txt') + tl_path = os.path.join(p, 'top_level.txt') + requires = parse_requires_path(req_path) + else: + # FIXME handle the case where zipfile is not available + zipf = zipimport.zipimporter(path) + fileobj = StringIO(zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + metadata = Metadata(fileobj=fileobj, scheme='legacy') + try: + data = zipf.get_data('EGG-INFO/requires.txt') + tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') + requires = parse_requires_data(data.decode('utf-8')) + except IOError: + requires = None + elif path.endswith('.egg-info'): + if os.path.isdir(path): + req_path = os.path.join(path, 'requires.txt') + requires = parse_requires_path(req_path) + path = os.path.join(path, 'PKG-INFO') + tl_path = os.path.join(path, 'top_level.txt') + metadata = Metadata(path=path, scheme='legacy') + else: + raise DistlibException('path must end with .egg-info or .egg, ' + 'got %r' % path) + + if requires: + metadata.add_requirements(requires) + # look for top-level modules in top_level.txt, if present + if tl_data is None: + if tl_path is not None and os.path.exists(tl_path): + with open(tl_path, 'rb') as f: + tl_data = f.read().decode('utf-8') + if not tl_data: + tl_data = [] + else: + tl_data = tl_data.splitlines() + self.modules = tl_data + return metadata + + def __repr__(self): + return '' % (self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. + """ + mismatches = [] + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + for path, _, _ in self.list_installed_files(): + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + return mismatches + + def list_installed_files(self): + """ + Iterates over the ``installed-files.txt`` entries and returns a tuple + ``(path, hash, size)`` for each line. 
+ + :returns: a list of (path, hash, size) + """ + + def _md5(path): + f = open(path, 'rb') + try: + content = f.read() + finally: + f.close() + return hashlib.md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + record_path = os.path.join(self.path, 'installed-files.txt') + result = [] + if os.path.exists(record_path): + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + p = os.path.normpath(os.path.join(self.path, line)) + # "./" is present as a marker between installed files + # and installation metadata files + if not os.path.exists(p): + logger.warning('Non-existent file: %s', p) + if p.endswith(('.pyc', '.pyo')): + continue + # otherwise fall through and fail + if not os.path.isdir(p): + result.append((p, _md5(p), _size(p))) + result.append((record_path, None, None)) + return result + + def list_distinfo_files(self, absolute=False): + """ + Iterates over the ``installed-files.txt`` entries and returns paths for + each line if the path is pointing to a file located in the + ``.egg-info`` directory or one of its subdirectories. + + :parameter absolute: If *absolute* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``installed-files.txt`` is returned. + :type absolute: boolean + :returns: iterator of paths + """ + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + skip = True + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line == './': + skip = False + continue + if not skip: + p = os.path.normpath(os.path.join(self.path, line)) + if p.startswith(self.path): + if absolute: + yield p + else: + yield line + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) and self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +new_dist_class = InstalledDistribution +old_dist_class = EggInfoDistribution + + +class DependencyGraph(object): + """ + Represents a dependency graph between distributions. + + The dependency relationships are stored in an ``adjacency_list`` that maps + distributions to a list of ``(other, label)`` tuples where ``other`` + is a distribution and the edge is labeled with ``label`` (i.e. the version + specifier, if such was provided). Also, for more efficient traversal, for + every distribution ``x``, a list of predecessors is kept in + ``reverse_list[x]``. An edge from distribution ``a`` to + distribution ``b`` means that ``a`` depends on ``b``. If any missing + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. + """ + + def __init__(self): + self.adjacency_list = {} + self.reverse_list = {} + self.missing = {} + + def add_distribution(self, distribution): + """Add the *distribution* to the graph. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + """ + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + # self.missing[distribution] = [] + + def add_edge(self, x, y, label=None): + """Add an edge from distribution *x* to distribution *y* with the given + *label*. 
+ + :type x: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type label: ``str`` or ``None`` + """ + self.adjacency_list[x].append((y, label)) + # multiple edges are allowed, so be careful + if x not in self.reverse_list[y]: + self.reverse_list[y].append(x) + + def add_missing(self, distribution, requirement): + """ + Add a missing *requirement* for the given *distribution*. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + :type requirement: ``str`` + """ + logger.debug('%s missing %r', distribution, requirement) + self.missing.setdefault(distribution, []).append(requirement) + + def _repr_dist(self, dist): + return '%s %s' % (dist.name, dist.version) + + def repr_node(self, dist, level=1): + """Prints only a subgraph""" + output = [self._repr_dist(dist)] + for other, label in self.adjacency_list[dist]: + dist = self._repr_dist(other) + if label is not None: + dist = '%s [%s]' % (dist, label) + output.append(' ' * level + str(dist)) + suboutput = self.repr_node(other, level + 1) + subs = suboutput.split('\n') + output.extend(subs[1:]) + return '\n'.join(output) + + def to_dot(self, f, skip_disconnected=True): + """Writes a DOT output for the graph to the provided file *f*. + + If *skip_disconnected* is set to ``True``, then all distributions + that are not dependent on any other distribution are skipped. + + :type f: has to support ``file``-like operations + :type skip_disconnected: ``bool`` + """ + disconnected = [] + + f.write("digraph dependencies {\n") + for dist, adjs in self.adjacency_list.items(): + if len(adjs) == 0 and not skip_disconnected: + disconnected.append(dist) + for other, label in adjs: + if label is not None: + f.write('"%s" -> "%s" [label="%s"]\n' % (dist.name, other.name, label)) + else: + f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + if not skip_disconnected and len(disconnected) > 0: + f.write('subgraph disconnected {\n') + f.write('label = "Disconnected"\n') + f.write('bgcolor = red\n') + + for dist in disconnected: + f.write('"%s"' % dist.name) + f.write('\n') + f.write('}\n') + f.write('}\n') + + def topological_sort(self): + """ + Perform a topological sort of the graph. + :return: A tuple, the first element of which is a topologically sorted + list of distributions, and the second element of which is a + list of distributions that cannot be sorted because they have + circular dependencies and so form a cycle. + """ + result = [] + # Make a shallow copy of the adjacency list + alist = {} + for k, v in self.adjacency_list.items(): + alist[k] = v[:] + while True: + # See what we can remove in this run + to_remove = [] + for k, v in list(alist.items())[:]: + if not v: + to_remove.append(k) + del alist[k] + if not to_remove: + # What's left in alist (if anything) is a cycle. 
+ break + # Remove from the adjacency list of others + for k, v in alist.items(): + alist[k] = [(d, r) for d, r in v if d not in to_remove] + logger.debug('Moving to result: %s', ['%s (%s)' % (d.name, d.version) for d in to_remove]) + result.extend(to_remove) + return result, list(alist.keys()) + + def __repr__(self): + """Representation of the graph""" + output = [] + for dist, adjs in self.adjacency_list.items(): + output.append(self.repr_node(dist)) + return '\n'.join(output) + + +def make_graph(dists, scheme='default'): + """Makes a dependency graph from the given distributions. + + :parameter dists: a list of distributions + :type dists: list of :class:`distutils2.database.InstalledDistribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance + """ + scheme = get_scheme(scheme) + graph = DependencyGraph() + provided = {} # maps names to lists of (version, dist) tuples + + # first, build the graph and find out what's provided + for dist in dists: + graph.add_distribution(dist) + + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + provided.setdefault(name, []).append((version, dist)) + + # now make the edges + for dist in dists: + requires = (dist.run_requires | dist.meta_requires | dist.build_requires | dist.dev_requires) + for req in requires: + try: + matcher = scheme.matcher(req) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + matched = False + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + graph.add_edge(dist, provider, req) + matched = True + break + if not matched: + graph.add_missing(dist, req) + return graph + + +def get_dependent_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + dependent on *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + dep = [dist] # dependent distributions + todo = graph.reverse_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop() + dep.append(d) + for succ in graph.reverse_list[d]: + if succ not in dep: + todo.append(succ) + + dep.pop(0) # remove dist from dep, was there to prevent infinite loops + return dep + + +def get_required_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + required by *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + in finding the dependencies. 
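+
+    A sketch of typical use, with ``dists`` taken from a
+    :class:`DistributionPath` (defined earlier in this module); the project
+    name ``requests`` is just an example::
+
+        dists = list(DistributionPath().get_distributions())
+        target = next(d for d in dists if d.key == 'requests')
+        for dep in get_required_dists(dists, target):
+            print(dep.name_and_version)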
+ """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + req = set() # required distributions + todo = graph.adjacency_list[dist] # list of nodes we should inspect + seen = set(t[0] for t in todo) # already added to todo + + while todo: + d = todo.pop()[0] + req.add(d) + pred_list = graph.adjacency_list[d] + for pred in pred_list: + d = pred[0] + if d not in req and d not in seen: + seen.add(d) + todo.append(pred) + return req + + +def make_dist(name, version, **kwargs): + """ + A convenience method for making a dist given just a name and version. + """ + summary = kwargs.pop('summary', 'Placeholder for summary') + md = Metadata(**kwargs) + md.name = name + md.version = version + md.summary = summary or 'Placeholder for summary' + return Distribution(md) diff --git a/.venv/Lib/site-packages/distlib/index.py b/.venv/Lib/site-packages/distlib/index.py new file mode 100644 index 00000000..56cd2867 --- /dev/null +++ b/.venv/Lib/site-packages/distlib/index.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import hashlib +import logging +import os +import shutil +import subprocess +import tempfile +try: + from threading import Thread +except ImportError: # pragma: no cover + from dummy_threading import Thread + +from . import DistlibException +from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, + urlparse, build_opener, string_types) +from .util import zip_dir, ServerProxy + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX = 'https://pypi.org/pypi' +DEFAULT_REALM = 'pypi' + + +class PackageIndex(object): + """ + This class represents a package index compatible with PyPI, the Python + Package Index. + """ + + boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' + + def __init__(self, url=None): + """ + Initialise an instance. + + :param url: The URL of the index. If not specified, the URL for PyPI is + used. + """ + self.url = url or DEFAULT_INDEX + self.read_configuration() + scheme, netloc, path, params, query, frag = urlparse(self.url) + if params or query or frag or scheme not in ('http', 'https'): + raise DistlibException('invalid repository: %s' % self.url) + self.password_handler = None + self.ssl_verifier = None + self.gpg = None + self.gpg_home = None + with open(os.devnull, 'w') as sink: + # Use gpg by default rather than gpg2, as gpg2 insists on + # prompting for passwords + for s in ('gpg', 'gpg2'): + try: + rc = subprocess.check_call([s, '--version'], stdout=sink, + stderr=sink) + if rc == 0: + self.gpg = s + break + except OSError: + pass + + def _get_pypirc_command(self): + """ + Get the distutils command for interacting with PyPI configurations. + :return: the command. + """ + from .util import _get_pypirc_command as cmd + return cmd() + + def read_configuration(self): + """ + Read the PyPI access configuration as supported by distutils. This populates + ``username``, ``password``, ``realm`` and ``url`` attributes from the + configuration. + """ + from .util import _load_pypirc + cfg = _load_pypirc(self) + self.username = cfg.get('username') + self.password = cfg.get('password') + self.realm = cfg.get('realm', 'pypi') + self.url = cfg.get('repository', self.url) + + def save_configuration(self): + """ + Save the PyPI access configuration. 
You must have set ``username`` and + ``password`` attributes before calling this method. + """ + self.check_credentials() + from .util import _store_pypirc + _store_pypirc(self) + + def check_credentials(self): + """ + Check that ``username`` and ``password`` have been set, and raise an + exception if not. + """ + if self.username is None or self.password is None: + raise DistlibException('username and password must be set') + pm = HTTPPasswordMgr() + _, netloc, _, _, _, _ = urlparse(self.url) + pm.add_password(self.realm, netloc, self.username, self.password) + self.password_handler = HTTPBasicAuthHandler(pm) + + def register(self, metadata): # pragma: no cover + """ + Register a distribution on PyPI, using the provided metadata. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the distribution to be + registered. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + metadata.validate() + d = metadata.todict() + d[':action'] = 'verify' + request = self.encode_request(d.items(), []) + self.send_request(request) + d[':action'] = 'submit' + request = self.encode_request(d.items(), []) + return self.send_request(request) + + def _reader(self, name, stream, outbuf): + """ + Thread runner for reading lines of from a subprocess into a buffer. + + :param name: The logical name of the stream (used for logging only). + :param stream: The stream to read from. This will typically a pipe + connected to the output stream of a subprocess. + :param outbuf: The list to append the read lines to. + """ + while True: + s = stream.readline() + if not s: + break + s = s.decode('utf-8').rstrip() + outbuf.append(s) + logger.debug('%s: %s' % (name, s)) + stream.close() + + def get_sign_command(self, filename, signer, sign_password, keystore=None): # pragma: no cover + """ + Return a suitable command for signing a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The signing command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + if sign_password is not None: + cmd.extend(['--batch', '--passphrase-fd', '0']) + td = tempfile.mkdtemp() + sf = os.path.join(td, os.path.basename(filename) + '.asc') + cmd.extend(['--detach-sign', '--armor', '--local-user', + signer, '--output', sf, filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd, sf + + def run_command(self, cmd, input_data=None): + """ + Run a command in a child process , passing it any input data specified. + + :param cmd: The command to run. + :param input_data: If specified, this must be a byte string containing + data to be sent to the child process. + :return: A tuple consisting of the subprocess' exit code, a list of + lines read from the subprocess' ``stdout``, and a list of + lines read from the subprocess' ``stderr``. 
+ """ + kwargs = { + 'stdout': subprocess.PIPE, + 'stderr': subprocess.PIPE, + } + if input_data is not None: + kwargs['stdin'] = subprocess.PIPE + stdout = [] + stderr = [] + p = subprocess.Popen(cmd, **kwargs) + # We don't use communicate() here because we may need to + # get clever with interacting with the command + t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) + t1.start() + t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) + t2.start() + if input_data is not None: + p.stdin.write(input_data) + p.stdin.close() + + p.wait() + t1.join() + t2.join() + return p.returncode, stdout, stderr + + def sign_file(self, filename, signer, sign_password, keystore=None): # pragma: no cover + """ + Sign a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The absolute pathname of the file where the signature is + stored. + """ + cmd, sig_file = self.get_sign_command(filename, signer, sign_password, + keystore) + rc, stdout, stderr = self.run_command(cmd, + sign_password.encode('utf-8')) + if rc != 0: + raise DistlibException('sign command failed with error ' + 'code %s' % rc) + return sig_file + + def upload_file(self, metadata, filename, signer=None, sign_password=None, + filetype='sdist', pyversion='source', keystore=None): + """ + Upload a release file to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the file to be uploaded. + :param filename: The pathname of the file to be uploaded. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param filetype: The type of the file being uploaded. This is the + distutils command which produced that file, e.g. + ``sdist`` or ``bdist_wheel``. + :param pyversion: The version of Python which the release relates + to. For code compatible with any Python, this would + be ``source``, otherwise it would be e.g. ``3.2``. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The HTTP response received from PyPI upon submission of the + request. 
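+
+        Illustrative call (file name and credentials are hypothetical, and
+        ``metadata`` is assumed to be a populated :class:`Metadata` instance)::
+
+            index = PackageIndex()
+            index.username, index.password = 'user', 'secret'
+            response = index.upload_file(metadata, 'dist/foo-1.0.tar.gz',
+                                         filetype='sdist', pyversion='source')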
+ """ + self.check_credentials() + if not os.path.exists(filename): + raise DistlibException('not found: %s' % filename) + metadata.validate() + d = metadata.todict() + sig_file = None + if signer: + if not self.gpg: + logger.warning('no signing program available - not signed') + else: + sig_file = self.sign_file(filename, signer, sign_password, + keystore) + with open(filename, 'rb') as f: + file_data = f.read() + md5_digest = hashlib.md5(file_data).hexdigest() + sha256_digest = hashlib.sha256(file_data).hexdigest() + d.update({ + ':action': 'file_upload', + 'protocol_version': '1', + 'filetype': filetype, + 'pyversion': pyversion, + 'md5_digest': md5_digest, + 'sha256_digest': sha256_digest, + }) + files = [('content', os.path.basename(filename), file_data)] + if sig_file: + with open(sig_file, 'rb') as f: + sig_data = f.read() + files.append(('gpg_signature', os.path.basename(sig_file), + sig_data)) + shutil.rmtree(os.path.dirname(sig_file)) + request = self.encode_request(d.items(), files) + return self.send_request(request) + + def upload_documentation(self, metadata, doc_dir): # pragma: no cover + """ + Upload documentation to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the documentation to be + uploaded. + :param doc_dir: The pathname of the directory which contains the + documentation. This should be the directory that + contains the ``index.html`` for the documentation. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + if not os.path.isdir(doc_dir): + raise DistlibException('not a directory: %r' % doc_dir) + fn = os.path.join(doc_dir, 'index.html') + if not os.path.exists(fn): + raise DistlibException('not found: %r' % fn) + metadata.validate() + name, version = metadata.name, metadata.version + zip_data = zip_dir(doc_dir).getvalue() + fields = [(':action', 'doc_upload'), + ('name', name), ('version', version)] + files = [('content', name, zip_data)] + request = self.encode_request(fields, files) + return self.send_request(request) + + def get_verify_command(self, signature_filename, data_filename, + keystore=None): + """ + Return a suitable command for verifying a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The verifying command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + cmd.extend(['--verify', signature_filename, data_filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd + + def verify_signature(self, signature_filename, data_filename, + keystore=None): + """ + Verify a signature for a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: True if the signature was verified, else False. 
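+
+        For example (a sketch; requires a working ``gpg`` and a keystore
+        containing the signer's public key, with hypothetical file names)::
+
+            ok = index.verify_signature('foo-1.0.tar.gz.asc', 'foo-1.0.tar.gz',
+                                        keystore='/path/to/keys')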
+ """ + if not self.gpg: + raise DistlibException('verification unavailable because gpg ' + 'unavailable') + cmd = self.get_verify_command(signature_filename, data_filename, + keystore) + rc, stdout, stderr = self.run_command(cmd) + if rc not in (0, 1): + raise DistlibException('verify command failed with error code %s' % rc) + return rc == 0 + + def download_file(self, url, destfile, digest=None, reporthook=None): + """ + This is a convenience method for downloading a file from an URL. + Normally, this will be a file from the index, though currently + no check is made for this (i.e. a file can be downloaded from + anywhere). + + The method is just like the :func:`urlretrieve` function in the + standard library, except that it allows digest computation to be + done during download and checking that the downloaded data + matched any expected value. + + :param url: The URL of the file to be downloaded (assumed to be + available via an HTTP GET request). + :param destfile: The pathname where the downloaded file is to be + saved. + :param digest: If specified, this must be a (hasher, value) + tuple, where hasher is the algorithm used (e.g. + ``'md5'``) and ``value`` is the expected value. + :param reporthook: The same as for :func:`urlretrieve` in the + standard library. + """ + if digest is None: + digester = None + logger.debug('No digest specified') + else: + if isinstance(digest, (list, tuple)): + hasher, digest = digest + else: + hasher = 'md5' + digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) + # The following code is equivalent to urlretrieve. + # We need to do it this way so that we can compute the + # digest of the file as we go. + with open(destfile, 'wb') as dfp: + # addinfourl is not a context manager on 2.x + # so we have to use try/finally + sfp = self.send_request(Request(url)) + try: + headers = sfp.info() + blocksize = 8192 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, blocksize, size) + while True: + block = sfp.read(blocksize) + if not block: + break + read += len(block) + dfp.write(block) + if digester: + digester.update(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, blocksize, size) + finally: + sfp.close() + + # check that we got the whole file, if we can + if size >= 0 and read < size: + raise DistlibException( + 'retrieval incomplete: got only %d out of %d bytes' + % (read, size)) + # if we have a digest, it must match. + if digester: + actual = digester.hexdigest() + if digest != actual: + raise DistlibException('%s digest mismatch for %s: expected ' + '%s, got %s' % (hasher, destfile, + digest, actual)) + logger.debug('Digest verified: %s', digest) + + def send_request(self, req): + """ + Send a standard library :class:`Request` to PyPI and return its + response. + + :param req: The request to send. + :return: The HTTP response from PyPI (a standard library HTTPResponse). + """ + handlers = [] + if self.password_handler: + handlers.append(self.password_handler) + if self.ssl_verifier: + handlers.append(self.ssl_verifier) + opener = build_opener(*handlers) + return opener.open(req) + + def encode_request(self, fields, files): + """ + Encode fields and files for posting to an HTTP server. + + :param fields: The fields to send as a list of (fieldname, value) + tuples. + :param files: The files to send as a list of (fieldname, filename, + file_bytes) tuple. 
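+        :return: A :class:`Request` instance addressed to ``self.url``, whose
+                 body is the ``multipart/form-data`` encoding of the given
+                 fields and files.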
+ """ + # Adapted from packaging, which in turn was adapted from + # http://code.activestate.com/recipes/146306 + + parts = [] + boundary = self.boundary + for k, values in fields: + if not isinstance(values, (list, tuple)): + values = [values] + + for v in values: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"' % + k).encode('utf-8'), + b'', + v.encode('utf-8'))) + for key, filename, value in files: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)).encode('utf-8'), + b'', + value)) + + parts.extend((b'--' + boundary + b'--', b'')) + + body = b'\r\n'.join(parts) + ct = b'multipart/form-data; boundary=' + boundary + headers = { + 'Content-type': ct, + 'Content-length': str(len(body)) + } + return Request(self.url, body, headers) + + def search(self, terms, operator=None): # pragma: no cover + if isinstance(terms, string_types): + terms = {'name': terms} + rpc_proxy = ServerProxy(self.url, timeout=3.0) + try: + return rpc_proxy.search(terms, operator or 'and') + finally: + rpc_proxy('close')() diff --git a/.venv/Lib/site-packages/distlib/locators.py b/.venv/Lib/site-packages/distlib/locators.py new file mode 100644 index 00000000..222c1bf3 --- /dev/null +++ b/.venv/Lib/site-packages/distlib/locators.py @@ -0,0 +1,1295 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# + +import gzip +from io import BytesIO +import json +import logging +import os +import posixpath +import re +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import zlib + +from . import DistlibException +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, queue, quote, unescape, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, text_type, Request, HTTPError, URLError) +from .database import Distribution, DistributionPath, make_dist +from .metadata import Metadata, MetadataInvalidError +from .util import (cached_property, ensure_slash, split_filename, get_project_data, parse_requirement, + parse_name_and_version, ServerProxy, normalize_name) +from .version import get_scheme, UnsupportedVersionError +from .wheel import Wheel, is_compatible + +logger = logging.getLogger(__name__) + +HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') +CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) +HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') +DEFAULT_INDEX = 'https://pypi.org/pypi' + + +def get_all_distribution_names(url=None): + """ + Return all distribution names known by an index. + :param url: The URL of the index. + :return: A list of all known distribution names. + """ + if url is None: + url = DEFAULT_INDEX + client = ServerProxy(url, timeout=3.0) + try: + return client.list_packages() + finally: + client('close')() + + +class RedirectHandler(BaseRedirectHandler): + """ + A class to work around a bug in some Python 3.2.x releases. + """ + + # There's a bug in the base version for some 3.2.x + # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header + # returns e.g. /abc, it bails because it says the scheme '' + # is bogus, when actually it should use the request's + # URL for the scheme. See Python issue #13696. + def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). 
Use first header. + newurl = None + for key in ('location', 'uri'): + if key in headers: + newurl = headers[key] + break + if newurl is None: # pragma: no cover + return + urlparts = urlparse(newurl) + if urlparts.scheme == '': + newurl = urljoin(req.get_full_url(), newurl) + if hasattr(headers, 'replace_header'): + headers.replace_header(key, newurl) + else: + headers[key] = newurl + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, headers) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + + +class Locator(object): + """ + A base class for locators - things that locate distributions. + """ + source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') + binary_extensions = ('.egg', '.exe', '.whl') + excluded_extensions = ('.pdf', ) + + # A list of tags indicating which wheels you want to match. The default + # value of None matches against the tags compatible with the running + # Python. If you want to match other values, set wheel_tags on a locator + # instance to a list of tuples (pyver, abi, arch) which you want to match. + wheel_tags = None + + downloadable_extensions = source_extensions + ('.whl', ) + + def __init__(self, scheme='default'): + """ + Initialise an instance. + :param scheme: Because locators look for most recent versions, they + need to know the version scheme to use. This specifies + the current PEP-recommended scheme - use ``'legacy'`` + if you need to support existing distributions on PyPI. + """ + self._cache = {} + self.scheme = scheme + # Because of bugs in some of the handlers on some of the platforms, + # we use our own opener rather than just using urlopen. + self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. + self.matcher = None + self.errors = queue.Queue() + + def get_errors(self): + """ + Return any errors which have occurred. + """ + result = [] + while not self.errors.empty(): # pragma: no cover + try: + e = self.errors.get(False) + result.append(e) + except self.errors.Empty: + continue + self.errors.task_done() + return result + + def clear_errors(self): + """ + Clear any errors which may have been logged. + """ + # Just get the errors and throw them away + self.get_errors() + + def clear_cache(self): + self._cache.clear() + + def _get_scheme(self): + return self._scheme + + def _set_scheme(self, value): + self._scheme = value + + scheme = property(_get_scheme, _set_scheme) + + def _get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This calls _get_project to do all the work, and just implements a caching layer on top. 
+ """ + if self._cache is None: # pragma: no cover + result = self._get_project(name) + elif name in self._cache: + result = self._cache[name] + else: + self.clear_errors() + result = self._get_project(name) + self._cache[name] = result + return result + + def score_url(self, url): + """ + Give an url a score which can be used to choose preferred URLs + for a given project release. + """ + t = urlparse(url) + basename = posixpath.basename(t.path) + compatible = True + is_wheel = basename.endswith('.whl') + is_downloadable = basename.endswith(self.downloadable_extensions) + if is_wheel: + compatible = is_compatible(Wheel(basename), self.wheel_tags) + return (t.scheme == 'https', 'pypi.org' in t.netloc, is_downloadable, is_wheel, compatible, basename) + + def prefer_url(self, url1, url2): + """ + Choose one of two URLs where both are candidates for distribution + archives for the same version of a distribution (for example, + .tar.gz vs. zip). + + The current implementation favours https:// URLs over http://, archives + from PyPI over those from other locations, wheel compatibility (if a + wheel) and then the archive name. + """ + result = url2 + if url1: + s1 = self.score_url(url1) + s2 = self.score_url(url2) + if s1 > s2: + result = url1 + if result != url2: + logger.debug('Not replacing %r with %r', url1, url2) + else: + logger.debug('Replacing %r with %r', url1, url2) + return result + + def split_filename(self, filename, project_name): + """ + Attempt to split a filename in project name, version and Python version. + """ + return split_filename(filename, project_name) + + def convert_url_to_download_info(self, url, project_name): + """ + See if a URL is a candidate for a download URL for a project (the URL + has typically been scraped from an HTML page). + + If it is, a dictionary is returned with keys "name", "version", + "filename" and "url"; otherwise, None is returned. 
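+
+        For example, a link ending in ``foo-1.0.tar.gz`` would yield roughly
+        (the host below is illustrative)::
+
+            {'name': 'foo', 'version': '1.0', 'filename': 'foo-1.0.tar.gz',
+             'url': 'https://example.com/packages/foo-1.0.tar.gz'}
+
+        possibly with ``'python-version'`` and ``'<algo>_digest'`` entries as
+        well.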
+ """ + + def same_project(name1, name2): + return normalize_name(name1) == normalize_name(name2) + + result = None + scheme, netloc, path, params, query, frag = urlparse(url) + if frag.lower().startswith('egg='): # pragma: no cover + logger.debug('%s: version hint in fragment: %r', project_name, frag) + m = HASHER_HASH.match(frag) + if m: + algo, digest = m.groups() + else: + algo, digest = None, None + origpath = path + if path and path[-1] == '/': # pragma: no cover + path = path[:-1] + if path.endswith('.whl'): + try: + wheel = Wheel(path) + if not is_compatible(wheel, self.wheel_tags): + logger.debug('Wheel not compatible: %s', path) + else: + if project_name is None: + include = True + else: + include = same_project(wheel.name, project_name) + if include: + result = { + 'name': wheel.name, + 'version': wheel.version, + 'filename': wheel.filename, + 'url': urlunparse((scheme, netloc, origpath, params, query, '')), + 'python-version': ', '.join(['.'.join(list(v[2:])) for v in wheel.pyver]), + } + except Exception: # pragma: no cover + logger.warning('invalid path for wheel: %s', path) + elif not path.endswith(self.downloadable_extensions): # pragma: no cover + logger.debug('Not downloadable: %s', path) + else: # downloadable extension + path = filename = posixpath.basename(path) + for ext in self.downloadable_extensions: + if path.endswith(ext): + path = path[:-len(ext)] + t = self.split_filename(path, project_name) + if not t: # pragma: no cover + logger.debug('No match for project/version: %s', path) + else: + name, version, pyver = t + if not project_name or same_project(project_name, name): + result = { + 'name': name, + 'version': version, + 'filename': filename, + 'url': urlunparse((scheme, netloc, origpath, params, query, '')), + } + if pyver: # pragma: no cover + result['python-version'] = pyver + break + if result and algo: + result['%s_digest' % algo] = digest + return result + + def _get_digest(self, info): + """ + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. + + Returns a 2-tuple (algo, digest) if found, else None. Currently + looks only for SHA256, then MD5. + """ + result = None + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break + return result + + def _update_version_data(self, result, info): + """ + Update a result dictionary (the final result from _get_project) with a + dictionary for a specific version, which typically holds information + gleaned from a filename or URL for an archive for the distribution. + """ + name = info.pop('name') + version = info.pop('version') + if version in result: + dist = result[version] + md = dist.metadata + else: + dist = make_dist(name, version, scheme=self.scheme) + md = dist.metadata + dist.digest = digest = self._get_digest(info) + url = info['url'] + result['digests'][url] = digest + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, url) + result['urls'].setdefault(version, set()).add(url) + dist.locator = self + result[version] = dist + + def locate(self, requirement, prereleases=False): + """ + Find the most recent distribution which matches the given + requirement. 
+ + :param requirement: A requirement of the form 'foo (1.0)' or perhaps + 'foo (>= 1.0, < 2.0, != 1.3)' + :param prereleases: If ``True``, allow pre-release versions + to be located. Otherwise, pre-release versions + are not returned. + :return: A :class:`Distribution` instance, or ``None`` if no such + distribution could be located. + """ + result = None + r = parse_requirement(requirement) + if r is None: # pragma: no cover + raise DistlibException('Not a valid requirement: %r' % requirement) + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) + logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) + versions = self.get_project(r.name) + if len(versions) > 2: # urls and digests keys are present + # sometimes, versions are invalid + slist = [] + vcls = matcher.version_class + for k in versions: + if k in ('urls', 'digests'): + continue + try: + if not matcher.match(k): + pass # logger.debug('%s did not match %r', matcher, k) + else: + if prereleases or not vcls(k).is_prerelease: + slist.append(k) + except Exception: # pragma: no cover + logger.warning('error matching %s with %r', matcher, k) + pass # slist.append(k) + if len(slist) > 1: + slist = sorted(slist, key=scheme.key) + if slist: + logger.debug('sorted list: %s', slist) + version = slist[-1] + result = versions[version] + if result: + if r.extras: + result.extras = r.extras + result.download_urls = versions.get('urls', {}).get(version, set()) + d = {} + sd = versions.get('digests', {}) + for url in result.download_urls: + if url in sd: # pragma: no cover + d[url] = sd[url] + result.digests = d + self.matcher = None + return result + + +class PyPIRPCLocator(Locator): + """ + This locator uses XML-RPC to locate distributions. It therefore + cannot be used with simple mirrors (that only mirror file content). + """ + + def __init__(self, url, **kwargs): + """ + Initialise an instance. + + :param url: The URL to use for XML-RPC. + :param kwargs: Passed to the superclass constructor. + """ + super(PyPIRPCLocator, self).__init__(**kwargs) + self.base_url = url + self.client = ServerProxy(url, timeout=3.0) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + return set(self.client.list_packages()) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + versions = self.client.package_releases(name, True) + for v in versions: + urls = self.client.release_urls(name, v) + data = self.client.release_data(name, v) + metadata = Metadata(scheme=self.scheme) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') + dist = Distribution(metadata) + if urls: + info = urls[0] + metadata.source_url = info['url'] + dist.digest = self._get_digest(info) + dist.locator = self + result[v] = dist + for info in urls: + url = info['url'] + digest = self._get_digest(info) + result['urls'].setdefault(v, set()).add(url) + result['digests'][url] = digest + return result + + +class PyPIJSONLocator(Locator): + """ + This locator uses PyPI's JSON interface. It's very limited in functionality + and probably not worth using. + """ + + def __init__(self, url, **kwargs): + super(PyPIJSONLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. 
+ """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + url = urljoin(self.base_url, '%s/json' % quote(name)) + try: + resp = self.opener.open(url) + data = resp.read().decode() # for now + d = json.loads(data) + md = Metadata(scheme=self.scheme) + data = d['info'] + md.name = data['name'] + md.version = data['version'] + md.license = data.get('license') + md.keywords = data.get('keywords', []) + md.summary = data.get('summary') + dist = Distribution(md) + dist.locator = self + # urls = d['urls'] + result[md.version] = dist + for info in d['urls']: + url = info['url'] + dist.download_urls.add(url) + dist.digests[url] = self._get_digest(info) + result['urls'].setdefault(md.version, set()).add(url) + result['digests'][url] = self._get_digest(info) + # Now get other releases + for version, infos in d['releases'].items(): + if version == md.version: + continue # already done + omd = Metadata(scheme=self.scheme) + omd.name = md.name + omd.version = version + odist = Distribution(omd) + odist.locator = self + result[version] = odist + for info in infos: + url = info['url'] + odist.download_urls.add(url) + odist.digests[url] = self._get_digest(info) + result['urls'].setdefault(version, set()).add(url) + result['digests'][url] = self._get_digest(info) + + +# for info in urls: +# md.source_url = info['url'] +# dist.digest = self._get_digest(info) +# dist.locator = self +# for info in urls: +# url = info['url'] +# result['urls'].setdefault(md.version, set()).add(url) +# result['digests'][url] = self._get_digest(info) + except Exception as e: + self.errors.put(text_type(e)) + logger.exception('JSON fetch failed: %s', e) + return result + + +class Page(object): + """ + This class represents a scraped HTML page. + """ + # The following slightly hairy-looking regex just looks for the contents of + # an anchor link, which has an attribute "href" either immediately preceded + # or immediately followed by a "rel" attribute. The attribute values can be + # declared with double quotes, single quotes or no quotes - which leads to + # the length of the expression. + _href = re.compile( + """ +(rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))\\s+)? +href\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)) +(\\s+rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)))? +""", re.I | re.S | re.X) + _base = re.compile(r"""]+)""", re.I | re.S) + + def __init__(self, data, url): + """ + Initialise an instance with the Unicode page contents and the URL they + came from. + """ + self.data = data + self.base_url = self.url = url + m = self._base.search(self.data) + if m: + self.base_url = m.group(1) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + @cached_property + def links(self): + """ + Return the URLs of all the links on a page together with information + about their "rel" attribute, for determining which ones to treat as + downloads and which ones to queue for further scraping. + """ + + def clean(url): + "Tidy up an URL." 
+ scheme, netloc, path, params, query, frag = urlparse(url) + return urlunparse((scheme, netloc, quote(path), params, query, frag)) + + result = set() + for match in self._href.finditer(self.data): + d = match.groupdict('') + rel = (d['rel1'] or d['rel2'] or d['rel3'] or d['rel4'] or d['rel5'] or d['rel6']) + url = d['url1'] or d['url2'] or d['url3'] + url = urljoin(self.base_url, url) + url = unescape(url) + url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) + result.add((url, rel)) + # We sort the result, hoping to bring the most recent versions + # to the front + result = sorted(result, key=lambda t: t[0], reverse=True) + return result + + +class SimpleScrapingLocator(Locator): + """ + A locator which scrapes HTML pages to locate downloads for a distribution. + This runs multiple threads to do the I/O; performance is at least as good + as pip's PackageFinder, which works in an analogous fashion. + """ + + # These are used to deal with various Content-Encoding schemes. + decoders = { + 'deflate': zlib.decompress, + 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(), + 'none': lambda b: b, + } + + def __init__(self, url, timeout=None, num_workers=10, **kwargs): + """ + Initialise an instance. + :param url: The root URL to use for scraping. + :param timeout: The timeout, in seconds, to be applied to requests. + This defaults to ``None`` (no timeout specified). + :param num_workers: The number of worker threads you want to do I/O, + This defaults to 10. + :param kwargs: Passed to the superclass. + """ + super(SimpleScrapingLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + self.timeout = timeout + self._page_cache = {} + self._seen = set() + self._to_fetch = queue.Queue() + self._bad_hosts = set() + self.skip_externals = False + self.num_workers = num_workers + self._lock = threading.RLock() + # See issue #45: we need to be resilient when the locator is used + # in a thread, e.g. with concurrent.futures. We can't use self._lock + # as it is for coordinating our internal threads - the ones created + # in _prepare_threads. + self._gplock = threading.RLock() + self.platform_check = False # See issue #112 + + def _prepare_threads(self): + """ + Threads are created only when get_project is called, and terminate + before it returns. They are there primarily to parallelise I/O (i.e. + fetching web pages). + """ + self._threads = [] + for i in range(self.num_workers): + t = threading.Thread(target=self._fetch) + t.daemon = True + t.start() + self._threads.append(t) + + def _wait_threads(self): + """ + Tell all the threads to terminate (by sending a sentinel value) and + wait for them to do so. + """ + # Note that you need two loops, since you can't say which + # thread will get each sentinel + for t in self._threads: + self._to_fetch.put(None) # sentinel + for t in self._threads: + t.join() + self._threads = [] + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + with self._gplock: + self.result = result + self.project_name = name + url = urljoin(self.base_url, '%s/' % quote(name)) + self._seen.clear() + self._page_cache.clear() + self._prepare_threads() + try: + logger.debug('Queueing %s', url) + self._to_fetch.put(url) + self._to_fetch.join() + finally: + self._wait_threads() + del self.result + return result + + platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|' + r'win(32|_amd64)|macosx_?\d+)\b', re.I) + + def _is_platform_dependent(self, url): + """ + Does an URL refer to a platform-specific download? 
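+
+ For example (an illustrative check; ``locator`` is assumed to be a
+ ``SimpleScrapingLocator`` instance)::
+
+     locator._is_platform_dependent(
+         'https://example.com/pkg-1.0-cp39-cp39-win_amd64.whl')   # truthy
+     locator._is_platform_dependent(
+         'https://example.com/pkg-1.0.tar.gz')                    # None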
+ """ + return self.platform_dependent.search(url) + + def _process_download(self, url): + """ + See if an URL is a suitable download for a project. + + If it is, register information in the result dictionary (for + _get_project) about the specific version it's for. + + Note that the return value isn't actually used other than as a boolean + value. + """ + if self.platform_check and self._is_platform_dependent(url): + info = None + else: + info = self.convert_url_to_download_info(url, self.project_name) + logger.debug('process_download: %s -> %s', url, info) + if info: + with self._lock: # needed because self.result is shared + self._update_version_data(self.result, info) + return info + + def _should_queue(self, link, referrer, rel): + """ + Determine whether a link URL from a referring page and with a + particular "rel" attribute should be queued for scraping. + """ + scheme, netloc, path, _, _, _ = urlparse(link) + if path.endswith(self.source_extensions + self.binary_extensions + self.excluded_extensions): + result = False + elif self.skip_externals and not link.startswith(self.base_url): + result = False + elif not referrer.startswith(self.base_url): + result = False + elif rel not in ('homepage', 'download'): + result = False + elif scheme not in ('http', 'https', 'ftp'): + result = False + elif self._is_platform_dependent(link): + result = False + else: + host = netloc.split(':', 1)[0] + if host.lower() == 'localhost': + result = False + else: + result = True + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, referrer, result) + return result + + def _fetch(self): + """ + Get a URL to fetch from the work queue, get the HTML page, examine its + links for download candidates and candidates for further scraping. + + This is a handy method to run in a thread. + """ + while True: + url = self._to_fetch.get() + try: + if url: + page = self.get_page(url) + if page is None: # e.g. after an error + continue + for link, rel in page.links: + if link not in self._seen: + try: + self._seen.add(link) + if (not self._process_download(link) and self._should_queue(link, url, rel)): + logger.debug('Queueing %s from %s', link, url) + self._to_fetch.put(link) + except MetadataInvalidError: # e.g. invalid versions + pass + except Exception as e: # pragma: no cover + self.errors.put(text_type(e)) + finally: + # always do this, to avoid hangs :-) + self._to_fetch.task_done() + if not url: + # logger.debug('Sentinel seen, quitting.') + break + + def get_page(self, url): + """ + Get the HTML for an URL, possibly from an in-memory cache. + + XXX TODO Note: this cache is never actually cleared. It's assumed that + the data won't get stale over the lifetime of a locator instance (not + necessarily true for the default_locator). 
+ """ + # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api + scheme, netloc, path, _, _, _ = urlparse(url) + if scheme == 'file' and os.path.isdir(url2pathname(path)): + url = urljoin(ensure_slash(url), 'index.html') + + if url in self._page_cache: + result = self._page_cache[url] + logger.debug('Returning %s from cache: %s', url, result) + else: + host = netloc.split(':', 1)[0] + result = None + if host in self._bad_hosts: + logger.debug('Skipping %s due to bad host %s', url, host) + else: + req = Request(url, headers={'Accept-encoding': 'identity'}) + try: + logger.debug('Fetching %s', url) + resp = self.opener.open(req, timeout=self.timeout) + logger.debug('Fetched %s', url) + headers = resp.info() + content_type = headers.get('Content-Type', '') + if HTML_CONTENT_TYPE.match(content_type): + final_url = resp.geturl() + data = resp.read() + encoding = headers.get('Content-Encoding') + if encoding: + decoder = self.decoders[encoding] # fail if not found + data = decoder(data) + encoding = 'utf-8' + m = CHARSET.search(content_type) + if m: + encoding = m.group(1) + try: + data = data.decode(encoding) + except UnicodeError: # pragma: no cover + data = data.decode('latin-1') # fallback + result = Page(data, final_url) + self._page_cache[final_url] = result + except HTTPError as e: + if e.code != 404: + logger.exception('Fetch failed: %s: %s', url, e) + except URLError as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + with self._lock: + self._bad_hosts.add(host) + except Exception as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + finally: + self._page_cache[url] = result # even if None (failure) + return result + + _distname_re = re.compile(']*>([^<]+)<') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + page = self.get_page(self.base_url) + if not page: + raise DistlibException('Unable to get %s' % self.base_url) + for match in self._distname_re.finditer(page.data): + result.add(match.group(1)) + return result + + +class DirectoryLocator(Locator): + """ + This class locates distributions in a directory tree. + """ + + def __init__(self, path, **kwargs): + """ + Initialise an instance. + :param path: The root of the directory tree to search. + :param kwargs: Passed to the superclass constructor, + except for: + * recursive - if True (the default), subdirectories are + recursed into. If False, only the top-level directory + is searched, + """ + self.recursive = kwargs.pop('recursive', True) + super(DirectoryLocator, self).__init__(**kwargs) + path = os.path.abspath(path) + if not os.path.isdir(path): # pragma: no cover + raise DistlibException('Not a directory: %r' % path) + self.base_dir = path + + def should_include(self, filename, parent): + """ + Should a filename be considered as a candidate for a distribution + archive? As well as the filename, the directory which contains it + is provided, though not used by the current implementation. 
+ """ + return filename.endswith(self.downloadable_extensions) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', pathname2url(os.path.abspath(fn)), '', '', '')) + info = self.convert_url_to_download_info(url, name) + if info: + self._update_version_data(result, info) + if not self.recursive: + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', pathname2url(os.path.abspath(fn)), '', '', '')) + info = self.convert_url_to_download_info(url, None) + if info: + result.add(info['name']) + if not self.recursive: + break + return result + + +class JSONLocator(Locator): + """ + This locator uses special extended metadata (not available on PyPI) and is + the basis of performant dependency resolution in distlib. Other locators + require archive downloads before dependencies can be determined! As you + might imagine, that can be slow. + """ + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + data = get_project_data(name) + if data: + for info in data.get('files', []): + if info['ptype'] != 'sdist' or info['pyversion'] != 'source': + continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution + dist = make_dist(data['name'], + info['version'], + summary=data.get('summary', 'Placeholder for summary'), + scheme=self.scheme) + md = dist.metadata + md.source_url = info['url'] + # TODO SHA256 digest + if 'digest' in info and info['digest']: + dist.digest = ('md5', info['digest']) + md.dependencies = info.get('requirements', {}) + dist.exports = info.get('exports', {}) + result[dist.version] = dist + result['urls'].setdefault(dist.version, set()).add(info['url']) + return result + + +class DistPathLocator(Locator): + """ + This locator finds installed distributions in a path. It can be useful for + adding to an :class:`AggregatingLocator`. + """ + + def __init__(self, distpath, **kwargs): + """ + Initialise an instance. + + :param distpath: A :class:`DistributionPath` instance to search. + """ + super(DistPathLocator, self).__init__(**kwargs) + assert isinstance(distpath, DistributionPath) + self.distpath = distpath + + def _get_project(self, name): + dist = self.distpath.get_distribution(name) + if dist is None: + result = {'urls': {}, 'digests': {}} + else: + result = { + dist.version: dist, + 'urls': { + dist.version: set([dist.source_url]) + }, + 'digests': { + dist.version: set([None]) + } + } + return result + + +class AggregatingLocator(Locator): + """ + This class allows you to chain and/or merge a list of locators. + """ + + def __init__(self, *locators, **kwargs): + """ + Initialise an instance. + + :param locators: The list of locators to search. + :param kwargs: Passed to the superclass constructor, + except for: + * merge - if False (the default), the first successful + search from any of the locators is returned. 
If True, + the results from all locators are merged (this can be + slow). + """ + self.merge = kwargs.pop('merge', False) + self.locators = locators + super(AggregatingLocator, self).__init__(**kwargs) + + def clear_cache(self): + super(AggregatingLocator, self).clear_cache() + for locator in self.locators: + locator.clear_cache() + + def _set_scheme(self, value): + self._scheme = value + for locator in self.locators: + locator.scheme = value + + scheme = property(Locator.scheme.fget, _set_scheme) + + def _get_project(self, name): + result = {} + for locator in self.locators: + d = locator.get_project(name) + if d: + if self.merge: + files = result.get('urls', {}) + digests = result.get('digests', {}) + # next line could overwrite result['urls'], result['digests'] + result.update(d) + df = result.get('urls') + if files and df: + for k, v in files.items(): + if k in df: + df[k] |= v + else: + df[k] = v + dd = result.get('digests') + if digests and dd: + dd.update(digests) + else: + # See issue #18. If any dists are found and we're looking + # for specific constraints, we only return something if + # a match is found. For example, if a DirectoryLocator + # returns just foo (1.0) while we're looking for + # foo (>= 2.0), we'll pretend there was nothing there so + # that subsequent locators can be queried. Otherwise we + # would just return foo (1.0) which would then lead to a + # failure to find foo (>= 2.0), because other locators + # weren't searched. Note that this only matters when + # merge=False. + if self.matcher is None: + found = True + else: + found = False + for k in d: + if self.matcher.match(k): + found = True + break + if found: + result = d + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for locator in self.locators: + try: + result |= locator.get_distribution_names() + except NotImplementedError: + pass + return result + + +# We use a legacy scheme simply because most of the dists on PyPI use legacy +# versions which don't conform to PEP 440. +default_locator = AggregatingLocator( + # JSONLocator(), # don't use as PEP 426 is withdrawn + SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0), + scheme='legacy') + +locate = default_locator.locate + + +class DependencyFinder(object): + """ + Locate dependencies for distributions. + """ + + def __init__(self, locator=None): + """ + Initialise an instance, using the specified locator + to locate distributions. + """ + self.locator = locator or default_locator + self.scheme = get_scheme(self.locator.scheme) + + def add_distribution(self, dist): + """ + Add a distribution to the finder. This will update internal information + about who provides what. + :param dist: The distribution to add. + """ + logger.debug('adding distribution %s', dist) + name = dist.key + self.dists_by_name[name] = dist + self.dists[(name, dist.version)] = dist + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + self.provided.setdefault(name, set()).add((version, dist)) + + def remove_distribution(self, dist): + """ + Remove a distribution from the finder. This will update internal + information about who provides what. + :param dist: The distribution to remove. 
+ """ + logger.debug('removing distribution %s', dist) + name = dist.key + del self.dists_by_name[name] + del self.dists[(name, dist.version)] + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Remove from provided: %s, %s, %s', name, version, dist) + s = self.provided[name] + s.remove((version, dist)) + if not s: + del self.provided[name] + + def get_matcher(self, reqt): + """ + Get a version matcher for a requirement. + :param reqt: The requirement + :type reqt: str + :return: A version matcher (an instance of + :class:`distlib.version.Matcher`). + """ + try: + matcher = self.scheme.matcher(reqt) + except UnsupportedVersionError: # pragma: no cover + # XXX compat-mode if cannot read the version + name = reqt.split()[0] + matcher = self.scheme.matcher(name) + return matcher + + def find_providers(self, reqt): + """ + Find the distributions which can fulfill a requirement. + + :param reqt: The requirement. + :type reqt: str + :return: A set of distribution which can fulfill the requirement. + """ + matcher = self.get_matcher(reqt) + name = matcher.key # case-insensitive + result = set() + provided = self.provided + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + result.add(provider) + break + return result + + def try_to_replace(self, provider, other, problems): + """ + Attempt to replace one provider with another. This is typically used + when resolving dependencies from multiple sources, e.g. A requires + (B >= 1.0) while C requires (B >= 1.1). + + For successful replacement, ``provider`` must meet all the requirements + which ``other`` fulfills. + + :param provider: The provider we are trying to replace with. + :param other: The provider we're trying to replace. + :param problems: If False is returned, this will contain what + problems prevented replacement. This is currently + a tuple of the literal string 'cantreplace', + ``provider``, ``other`` and the set of requirements + that ``provider`` couldn't fulfill. + :return: True if we can replace ``other`` with ``provider``, else + False. + """ + rlist = self.reqts[other] + unmatched = set() + for s in rlist: + matcher = self.get_matcher(s) + if not matcher.match(provider.version): + unmatched.add(s) + if unmatched: + # can't replace other with provider + problems.add(('cantreplace', provider, other, frozenset(unmatched))) + result = False + else: + # can replace other with provider + self.remove_distribution(other) + del self.reqts[other] + for s in rlist: + self.reqts.setdefault(provider, set()).add(s) + self.add_distribution(provider) + result = True + return result + + def find(self, requirement, meta_extras=None, prereleases=False): + """ + Find a distribution and all distributions it depends on. + + :param requirement: The requirement specifying the distribution to + find, or a Distribution instance. + :param meta_extras: A list of meta extras such as :test:, :build: and + so on. + :param prereleases: If ``True``, allow pre-release versions to be + returned - otherwise, don't return prereleases + unless they're all that's available. + + Return a set of :class:`Distribution` instances and a set of + problems. 
+ + The distributions returned should be such that they have the + :attr:`required` attribute set to ``True`` if they were + from the ``requirement`` passed to ``find()``, and they have the + :attr:`build_time_dependency` attribute set to ``True`` unless they + are post-installation dependencies of the ``requirement``. + + The problems should be a tuple consisting of the string + ``'unsatisfied'`` and the requirement which couldn't be satisfied + by any distribution known to the locator. + """ + + self.provided = {} + self.dists = {} + self.dists_by_name = {} + self.reqts = {} + + meta_extras = set(meta_extras or []) + if ':*:' in meta_extras: + meta_extras.remove(':*:') + # :meta: and :run: are implicitly included + meta_extras |= set([':test:', ':build:', ':dev:']) + + if isinstance(requirement, Distribution): + dist = odist = requirement + logger.debug('passed %s as requirement', odist) + else: + dist = odist = self.locator.locate(requirement, prereleases=prereleases) + if dist is None: + raise DistlibException('Unable to locate %r' % requirement) + logger.debug('located %s', odist) + dist.requested = True + problems = set() + todo = set([dist]) + install_dists = set([odist]) + while todo: + dist = todo.pop() + name = dist.key # case-insensitive + if name not in self.dists_by_name: + self.add_distribution(dist) + else: + # import pdb; pdb.set_trace() + other = self.dists_by_name[name] + if other != dist: + self.try_to_replace(dist, other, problems) + + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires + ereqts = set() + if meta_extras and dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: + ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts + for r in all_reqts: + providers = self.find_providers(r) + if not providers: + logger.debug('No providers found for %r', r) + provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. + if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) + if provider is None: + logger.debug('Cannot satisfy %r', r) + problems.add(('unsatisfied', r)) + else: + n, v = provider.key, provider.version + if (n, v) not in self.dists: + todo.add(provider) + providers.add(provider) + if r in ireqts and dist in install_dists: + install_dists.add(provider) + logger.debug('Adding %s to install_dists', provider.name_and_version) + for p in providers: + name = p.key + if name not in self.dists_by_name: + self.reqts.setdefault(p, set()).add(r) + else: + other = self.dists_by_name[name] + if other != p: + # see if other can be replaced by p + self.try_to_replace(p, other, problems) + + dists = set(self.dists.values()) + for dist in dists: + dist.build_time_dependency = dist not in install_dists + if dist.build_time_dependency: + logger.debug('%s is a build-time dependency only.', dist.name_and_version) + logger.debug('find done for %s', odist) + return dists, problems diff --git a/.venv/Lib/site-packages/distlib/manifest.py b/.venv/Lib/site-packages/distlib/manifest.py new file mode 100644 index 00000000..420dcf12 --- /dev/null +++ b/.venv/Lib/site-packages/distlib/manifest.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Class representing the list of files in a distribution. 
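+
+ A small usage sketch (illustrative; ``src`` is a placeholder for a real
+ source tree)::
+
+     from distlib.manifest import Manifest
+
+     manifest = Manifest('src')
+     manifest.findall()
+     manifest.process_directive('include README.rst')
+     manifest.process_directive('recursive-include docs *.rst')
+     print(manifest.sorted())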
+ +Equivalent to distutils.filelist, but fixes some problems. +""" +import fnmatch +import logging +import os +import re +import sys + +from . import DistlibException +from .compat import fsdecode +from .util import convert_path + + +__all__ = ['Manifest'] + +logger = logging.getLogger(__name__) + +# a \ followed by some spaces + EOL +_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M) +_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) + +# +# Due to the different results returned by fnmatch.translate, we need +# to do slightly different processing for Python 2.7 and 3.2 ... this needed +# to be brought in for Python 3.6 onwards. +# +_PYTHON_VERSION = sys.version_info[:2] + + +class Manifest(object): + """ + A list of files built by exploring the filesystem and filtered by applying various + patterns to what we find there. + """ + + def __init__(self, base=None): + """ + Initialise an instance. + + :param base: The base directory to explore under. + """ + self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) + self.prefix = self.base + os.sep + self.allfiles = None + self.files = set() + + # + # Public API + # + + def findall(self): + """Find all files under the base and set ``allfiles`` to the absolute + pathnames of files found. + """ + from stat import S_ISREG, S_ISDIR, S_ISLNK + + self.allfiles = allfiles = [] + root = self.base + stack = [root] + pop = stack.pop + push = stack.append + + while stack: + root = pop() + names = os.listdir(root) + + for name in names: + fullname = os.path.join(root, name) + + # Avoid excess stat calls -- just one will do, thank you! + stat = os.stat(fullname) + mode = stat.st_mode + if S_ISREG(mode): + allfiles.append(fsdecode(fullname)) + elif S_ISDIR(mode) and not S_ISLNK(mode): + push(fullname) + + def add(self, item): + """ + Add a file to the manifest. + + :param item: The pathname to add. This can be relative to the base. + """ + if not item.startswith(self.prefix): + item = os.path.join(self.base, item) + self.files.add(os.path.normpath(item)) + + def add_many(self, items): + """ + Add a list of files to the manifest. + + :param items: The pathnames to add. These can be relative to the base. + """ + for item in items: + self.add(item) + + def sorted(self, wantdirs=False): + """ + Return sorted files in directory order + """ + + def add_dir(dirs, d): + dirs.add(d) + logger.debug('add_dir added %s', d) + if d != self.base: + parent, _ = os.path.split(d) + assert parent not in ('', '/') + add_dir(dirs, parent) + + result = set(self.files) # make a copy! + if wantdirs: + dirs = set() + for f in result: + add_dir(dirs, os.path.dirname(f)) + result |= dirs + return [os.path.join(*path_tuple) for path_tuple in + sorted(os.path.split(path) for path in result)] + + def clear(self): + """Clear all collected files.""" + self.files = set() + self.allfiles = [] + + def process_directive(self, directive): + """ + Process a directive which either adds some files from ``allfiles`` to + ``files``, or removes some files from ``files``. + + :param directive: The directive to process. This should be in a format + compatible with distutils ``MANIFEST.in`` files: + + http://docs.python.org/distutils/sourcedist.html#commands + """ + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dirpattern). 
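+ #
+ # For example (illustrative calls, shown here only as comments):
+ #
+ #   manifest.process_directive('include README.rst')
+ #   manifest.process_directive('recursive-include docs *.rst *.png')
+ #   manifest.process_directive('prune build')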
+ action, patterns, thedir, dirpattern = self._parse_directive(directive) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. + if action == 'include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=True): + logger.warning('no files found matching %r', pattern) + + elif action == 'exclude': + for pattern in patterns: + self._exclude_pattern(pattern, anchor=True) + + elif action == 'global-include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=False): + logger.warning('no files found matching %r ' + 'anywhere in distribution', pattern) + + elif action == 'global-exclude': + for pattern in patterns: + self._exclude_pattern(pattern, anchor=False) + + elif action == 'recursive-include': + for pattern in patterns: + if not self._include_pattern(pattern, prefix=thedir): + logger.warning('no files found matching %r ' + 'under directory %r', pattern, thedir) + + elif action == 'recursive-exclude': + for pattern in patterns: + self._exclude_pattern(pattern, prefix=thedir) + + elif action == 'graft': + if not self._include_pattern(None, prefix=dirpattern): + logger.warning('no directories found matching %r', + dirpattern) + + elif action == 'prune': + if not self._exclude_pattern(None, prefix=dirpattern): + logger.warning('no previously-included directories found ' + 'matching %r', dirpattern) + else: # pragma: no cover + # This should never happen, as it should be caught in + # _parse_template_line + raise DistlibException( + 'invalid action %r' % action) + + # + # Private API + # + + def _parse_directive(self, directive): + """ + Validate a directive. + :param directive: The directive to validate. + :return: A tuple of action, patterns, thedir, dir_patterns + """ + words = directive.split() + if len(words) == 1 and words[0] not in ('include', 'exclude', + 'global-include', + 'global-exclude', + 'recursive-include', + 'recursive-exclude', + 'graft', 'prune'): + # no action given, let's use the default 'include' + words.insert(0, 'include') + + action = words[0] + patterns = thedir = dir_pattern = None + + if action in ('include', 'exclude', + 'global-include', 'global-exclude'): + if len(words) < 2: + raise DistlibException( + '%r expects ...' % action) + + patterns = [convert_path(word) for word in words[1:]] + + elif action in ('recursive-include', 'recursive-exclude'): + if len(words) < 3: + raise DistlibException( + '%r expects ...' % action) + + thedir = convert_path(words[1]) + patterns = [convert_path(word) for word in words[2:]] + + elif action in ('graft', 'prune'): + if len(words) != 2: + raise DistlibException( + '%r expects a single ' % action) + + dir_pattern = convert_path(words[1]) + + else: + raise DistlibException('unknown action %r' % action) + + return action, patterns, thedir, dir_pattern + + def _include_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Select strings (presumably filenames) from 'self.files' that + match 'pattern', a Unix-style wildcard (glob) pattern. + + Patterns are not quite the same as implemented by the 'fnmatch' + module: '*' and '?' match non-special characters, where "special" + is platform-dependent: slash on Unix; colon, slash, and backslash on + DOS/Windows; and colon on Mac OS. + + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". 
If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. + + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found. + """ + # XXX docstring lying about what the special chars are? + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + + # delayed loading of allfiles list + if self.allfiles is None: + self.findall() + + for name in self.allfiles: + if pattern_re.search(name): + self.files.add(name) + found = True + return found + + def _exclude_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. + + Other parameters are the same as for 'include_pattern()', above. + The list 'self.files' is modified in place. Return True if files are + found. + + This API is public to allow e.g. exclusion of SCM subdirs, e.g. when + packaging source distributions + """ + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + for f in list(self.files): + if pattern_re.search(f): + self.files.remove(f) + found = True + return found + + def _translate_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Translate a shell-like wildcard pattern to a compiled regular + expression. + + Return the compiled regex. If 'is_regex' true, + then 'pattern' is directly compiled to a regex (if it's a string) + or just returned as-is (assumes it's a regex object). + """ + if is_regex: + if isinstance(pattern, str): + return re.compile(pattern) + else: + return pattern + + if _PYTHON_VERSION > (3, 2): + # ditch start and end characters + start, _, end = self._glob_to_re('_').partition('_') + + if pattern: + pattern_re = self._glob_to_re(pattern) + if _PYTHON_VERSION > (3, 2): + assert pattern_re.startswith(start) and pattern_re.endswith(end) + else: + pattern_re = '' + + base = re.escape(os.path.join(self.base, '')) + if prefix is not None: + # ditch end of pattern character + if _PYTHON_VERSION <= (3, 2): + empty_pattern = self._glob_to_re('') + prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] + else: + prefix_re = self._glob_to_re(prefix) + assert prefix_re.startswith(start) and prefix_re.endswith(end) + prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] + sep = os.sep + if os.sep == '\\': + sep = r'\\' + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + sep.join((prefix_re, + '.*' + pattern_re)) + else: + pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] + pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, + pattern_re, end) + else: # no prefix -- respect anchor flag + if anchor: + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + pattern_re + else: + pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) + + return re.compile(pattern_re) + + def _glob_to_re(self, pattern): + """Translate a shell-like glob pattern to a regular expression. + + Return a string containing the regex. Differs from + 'fnmatch.translate()' in that '*' does not match "special characters" + (which are platform-specific). 
+ """ + pattern_re = fnmatch.translate(pattern) + + # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which + # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, + # and by extension they shouldn't match such "special characters" under + # any OS. So change all non-escaped dots in the RE to match any + # character except the special characters (currently: just os.sep). + sep = os.sep + if os.sep == '\\': + # we're using a regex to manipulate a regex, so we need + # to escape the backslash twice + sep = r'\\\\' + escaped = r'\1[^%s]' % sep + pattern_re = re.sub(r'((? y, + '!=': lambda x, y: x != y, + '<': lambda x, y: x < y, + '<=': lambda x, y: x == y or x < y, + '>': lambda x, y: x > y, + '>=': lambda x, y: x == y or x > y, + 'and': lambda x, y: x and y, + 'or': lambda x, y: x or y, + 'in': lambda x, y: x in y, + 'not in': lambda x, y: x not in y, + } + + def evaluate(self, expr, context): + """ + Evaluate a marker expression returned by the :func:`parse_requirement` + function in the specified context. + """ + if isinstance(expr, string_types): + if expr[0] in '\'"': + result = expr[1:-1] + else: + if expr not in context: + raise SyntaxError('unknown variable: %s' % expr) + result = context[expr] + else: + assert isinstance(expr, dict) + op = expr['op'] + if op not in self.operations: + raise NotImplementedError('op not implemented: %s' % op) + elhs = expr['lhs'] + erhs = expr['rhs'] + if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): + raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) + + lhs = self.evaluate(elhs, context) + rhs = self.evaluate(erhs, context) + if ((_is_version_marker(elhs) or _is_version_marker(erhs)) and + op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): + lhs = LV(lhs) + rhs = LV(rhs) + elif _is_version_marker(elhs) and op in ('in', 'not in'): + lhs = LV(lhs) + rhs = _get_versions(rhs) + result = self.operations[op](lhs, rhs) + return result + + +_DIGITS = re.compile(r'\d+\.\d+') + + +def default_context(): + + def format_full_version(info): + version = '%s.%s.%s' % (info.major, info.minor, info.micro) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + if hasattr(sys, 'implementation'): + implementation_version = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + implementation_version = '0' + implementation_name = '' + + ppv = platform.python_version() + m = _DIGITS.match(ppv) + pv = m.group(0) + result = { + 'implementation_name': implementation_name, + 'implementation_version': implementation_version, + 'os_name': os.name, + 'platform_machine': platform.machine(), + 'platform_python_implementation': platform.python_implementation(), + 'platform_release': platform.release(), + 'platform_system': platform.system(), + 'platform_version': platform.version(), + 'platform_in_venv': str(in_venv()), + 'python_full_version': ppv, + 'python_version': pv, + 'sys_platform': sys.platform, + } + return result + + +DEFAULT_CONTEXT = default_context() +del default_context + +evaluator = Evaluator() + + +def interpret(marker, execution_context=None): + """ + Interpret a marker and return a result depending on environment. + + :param marker: The marker to interpret. + :type marker: str + :param execution_context: The context used for name lookup. 
+ :type execution_context: mapping + """ + try: + expr, rest = parse_marker(marker) + except Exception as e: + raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) + if rest and rest[0] != '#': + raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) + context = dict(DEFAULT_CONTEXT) + if execution_context: + context.update(execution_context) + return evaluator.evaluate(expr, context) diff --git a/.venv/Lib/site-packages/distlib/metadata.py b/.venv/Lib/site-packages/distlib/metadata.py new file mode 100644 index 00000000..ce9a34b3 --- /dev/null +++ b/.venv/Lib/site-packages/distlib/metadata.py @@ -0,0 +1,1031 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. + +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and 2.2). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + +from . import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. 
Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \\|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Summary', 'Description', 'Keywords', 'Home-page', + 'Author', 'Author-email', 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Obsoletes-Dist', 'Requires-External', + 'Maintainer', 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', 'Obsoleted-By', 'Setup-Requires-Dist', + 'Extension', 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension') + +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). +# Ditto for Obsoletes - see issue #140. 
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires', 'Provides', 'Obsoletes') + +_566_MARKERS = ('Description-Content-Type', ) + +_643_MARKERS = ('Dynamic', 'License-File') + +_643_FIELDS = _566_FIELDS + _643_MARKERS + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) +_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) +_ALL_FIELDS.update(_566_FIELDS) +_ALL_FIELDS.update(_643_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version in ('1.3', '2.1'): + # avoid adding field names if already there + return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS) + elif version == '2.0': + raise ValueError('Metadata 2.0 is withdrawn and not supported') + # return _426_FIELDS + elif version == '2.2': + return _643_FIELDS + raise MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + + def _has_marker(keys, markers): + return any(marker in keys for marker in markers) + + keys = [key for key, value in fields.items() if value not in ([], 'UNKNOWN', None)] + possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.1', '2.2'] # 2.0 removed + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + logger.debug('Removed 1.0 due to %s', key) + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + logger.debug('Removed 1.1 due to %s', key) + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + logger.debug('Removed 1.2 due to %s', key) + if key not in _566_FIELDS and '1.3' in possible_versions: + possible_versions.remove('1.3') + logger.debug('Removed 1.3 due to %s', key) + if key not in _566_FIELDS and '2.1' in possible_versions: + if key != 'Description': # In 2.1, description allowed after headers + possible_versions.remove('2.1') + logger.debug('Removed 2.1 due to %s', key) + if key not in _643_FIELDS and '2.2' in possible_versions: + possible_versions.remove('2.2') + logger.debug('Removed 2.2 due to %s', key) + # if key not in _426_FIELDS and '2.0' in possible_versions: + # possible_versions.remove('2.0') + # logger.debug('Removed 2.0 due to %s', key) + + # possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! 
+ elif len(possible_versions) == 0: + logger.debug('Out of options - unknown metadata set: %s', fields) + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) + # is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + is_2_2 = '2.2' in possible_versions and _has_marker(keys, _643_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_2) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.1/2.2 fields') + + # we have the choice, 1.0, or 1.2, 2.1 or 2.2 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.1 adds more features + # - 2.2 is the latest + if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_2: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + if is_2_1: + return '2.1' + # if is_2_2: + # return '2.2' + + return '2.2' + + +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 +_ATTR2FIELD = {name.lower().replace("-", "_"): name for name in _ALL_FIELDS} +_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python', ) +_VERSION_FIELDS = ('Version', ) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', 'Requires', 'Provides', 'Obsoletes-Dist', 'Provides-Dist', + 'Requires-Dist', 'Requires-External', 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension', 'License-File') +_LISTTUPLEFIELDS = ('Project-URL', ) + +_ELEMENTSFIELD = ('Keywords', ) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). 
You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + + def get_fullname(self, filesafe=False): + """ + Return the distribution name with version. + + If filesafe is true, return a filename-escaped form. 
+ """ + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] + # logger.debug('Attempting to set metadata for %s', self) + # self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. 
+ """ + + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning("'%s': '%s' is not valid (field '%s')", project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? + elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, scheme.is_valid_constraint_list), (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append("Wrong value for '%s': %s" % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. + """ + self.set_metadata_version() + + fields = _version2fieldlist(self['Metadata-Version']) + + data = {} + + for field_name in fields: + if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, self.name, self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' +LEGACY_METADATA_FILENAME = 'METADATA' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.1 + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. 
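+
+ A minimal usage sketch (illustrative; ``PKG-INFO`` stands for any legacy
+ key-value metadata file on disk)::
+
+     md = Metadata(path='PKG-INFO')
+     print(md.name, md.version, md.summary)
+     for req in md.run_requires:
+         print(req)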
+ """ + + METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + FIELDNAME_MATCHER = re.compile('^[A-Z]([0-9A-Z-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy', ), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy', )), + 'version': (VERSION_MATCHER, ('legacy', )), + 'summary': (SUMMARY_MATCHER, ('legacy', )), + 'dynamic': (FIELDNAME_MATCHER, ('legacy', )), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + # import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if not d: + d = self._data.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError("'%s' is an invalid value for " + "the '%s' property" % (value, key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = 
self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = self.provides + # 
author = {} + # maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + ('extensions', 'python.details', 'license'): 'License', + 'summary': 'Summary', + 'description': 'Description', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', + } + + def _to_legacy(self): + + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + # import pdb; pdb.set_trace() + for nk, ok in self.LEGACY_MAPPING.items(): + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: any other fields wanted + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = {'requires': requirements} + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, self.metadata_version, name, version) diff --git a/.venv/Lib/site-packages/distlib/resources.py b/.venv/Lib/site-packages/distlib/resources.py new file mode 100644 index 00000000..fef52aa1 --- /dev/null +++ b/.venv/Lib/site-packages/distlib/resources.py @@ -0,0 +1,358 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +from __future__ import unicode_literals + +import bisect +import io +import logging +import os +import pkgutil +import sys +import types +import zipimport + +from . import DistlibException +from .util import cached_property, get_cache_base, Cache + +logger = logging.getLogger(__name__) + + +cache = None # created when needed + + +class ResourceCache(Cache): + def __init__(self, base=None): + if base is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('resource-cache')) + super(ResourceCache, self).__init__(base) + + def is_stale(self, resource, path): + """ + Is the cache stale for the given resource? + + :param resource: The :class:`Resource` being cached. + :param path: The path of the resource in the cache. + :return: True if the cache is stale. + """ + # Cache invalidation is a hard problem :-) + return True + + def get(self, resource): + """ + Get a resource into the cache, + + :param resource: A :class:`Resource` instance. + :return: The pathname of the resource in the cache. + """ + prefix, path = resource.finder.get_cache_info(resource) + if prefix is None: + result = path + else: + result = os.path.join(self.base, self.prefix_to_dir(prefix), path) + dirname = os.path.dirname(result) + if not os.path.isdir(dirname): + os.makedirs(dirname) + if not os.path.exists(result): + stale = True + else: + stale = self.is_stale(resource, path) + if stale: + # write the bytes of the resource to the cache location + with open(result, 'wb') as f: + f.write(resource.bytes) + return result + + +class ResourceBase(object): + def __init__(self, finder, name): + self.finder = finder + self.name = name + + +class Resource(ResourceBase): + """ + A class representing an in-package resource, such as a data file. This is + not normally instantiated by user code, but rather by a + :class:`ResourceFinder` which manages the resource. + """ + is_container = False # Backwards compatibility + + def as_stream(self): + """ + Get the resource as a stream. + + This is not a property to make it obvious that it returns a new stream + each time. + """ + return self.finder.get_stream(self) + + @cached_property + def file_path(self): + global cache + if cache is None: + cache = ResourceCache() + return cache.get(self) + + @cached_property + def bytes(self): + return self.finder.get_bytes(self) + + @cached_property + def size(self): + return self.finder.get_size(self) + + +class ResourceContainer(ResourceBase): + is_container = True # Backwards compatibility + + @cached_property + def resources(self): + return self.finder.get_resources(self) + + +class ResourceFinder(object): + """ + Resource finder for file system resources. 
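A small usage sketch of the `Resource`/`ResourceFinder` API above, assuming this vendored distlib is importable; `t32.exe` is used only because it is one of the data files shipped inside the distlib package itself (see the launcher binaries added later in this diff).

```python
from distlib.resources import finder

f = finder('distlib')              # ResourceFinder for the distlib package
res = f.find('t32.exe')            # a Resource, or None if not present
if res is not None and not res.is_container:
    print(res.size)                # looked up lazily via the owning finder
    data = res.bytes               # whole payload as bytes
    with res.as_stream() as s:     # a fresh binary stream on each call
        assert s.read() == data
```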
+ """ + + if sys.platform.startswith('java'): + skipped_extensions = ('.pyc', '.pyo', '.class') + else: + skipped_extensions = ('.pyc', '.pyo') + + def __init__(self, module): + self.module = module + self.loader = getattr(module, '__loader__', None) + self.base = os.path.dirname(getattr(module, '__file__', '')) + + def _adjust_path(self, path): + return os.path.realpath(path) + + def _make_path(self, resource_name): + # Issue #50: need to preserve type of path on Python 2.x + # like os.path._get_sep + if isinstance(resource_name, bytes): # should only happen on 2.x + sep = b'/' + else: + sep = '/' + parts = resource_name.split(sep) + parts.insert(0, self.base) + result = os.path.join(*parts) + return self._adjust_path(result) + + def _find(self, path): + return os.path.exists(path) + + def get_cache_info(self, resource): + return None, resource.path + + def find(self, resource_name): + path = self._make_path(resource_name) + if not self._find(path): + result = None + else: + if self._is_directory(path): + result = ResourceContainer(self, resource_name) + else: + result = Resource(self, resource_name) + result.path = path + return result + + def get_stream(self, resource): + return open(resource.path, 'rb') + + def get_bytes(self, resource): + with open(resource.path, 'rb') as f: + return f.read() + + def get_size(self, resource): + return os.path.getsize(resource.path) + + def get_resources(self, resource): + def allowed(f): + return (f != '__pycache__' and not + f.endswith(self.skipped_extensions)) + return set([f for f in os.listdir(resource.path) if allowed(f)]) + + def is_container(self, resource): + return self._is_directory(resource.path) + + _is_directory = staticmethod(os.path.isdir) + + def iterator(self, resource_name): + resource = self.find(resource_name) + if resource is not None: + todo = [resource] + while todo: + resource = todo.pop(0) + yield resource + if resource.is_container: + rname = resource.name + for name in resource.resources: + if not rname: + new_name = name + else: + new_name = '/'.join([rname, name]) + child = self.find(new_name) + if child.is_container: + todo.append(child) + else: + yield child + + +class ZipResourceFinder(ResourceFinder): + """ + Resource finder for resources in .zip files. 
+ """ + def __init__(self, module): + super(ZipResourceFinder, self).__init__(module) + archive = self.loader.archive + self.prefix_len = 1 + len(archive) + # PyPy doesn't have a _files attr on zipimporter, and you can't set one + if hasattr(self.loader, '_files'): + self._files = self.loader._files + else: + self._files = zipimport._zip_directory_cache[archive] + self.index = sorted(self._files) + + def _adjust_path(self, path): + return path + + def _find(self, path): + path = path[self.prefix_len:] + if path in self._files: + result = True + else: + if path and path[-1] != os.sep: + path = path + os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + if not result: + logger.debug('_find failed: %r %r', path, self.loader.prefix) + else: + logger.debug('_find worked: %r %r', path, self.loader.prefix) + return result + + def get_cache_info(self, resource): + prefix = self.loader.archive + path = resource.path[1 + len(prefix):] + return prefix, path + + def get_bytes(self, resource): + return self.loader.get_data(resource.path) + + def get_stream(self, resource): + return io.BytesIO(self.get_bytes(resource)) + + def get_size(self, resource): + path = resource.path[self.prefix_len:] + return self._files[path][3] + + def get_resources(self, resource): + path = resource.path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + plen = len(path) + result = set() + i = bisect.bisect(self.index, path) + while i < len(self.index): + if not self.index[i].startswith(path): + break + s = self.index[i][plen:] + result.add(s.split(os.sep, 1)[0]) # only immediate children + i += 1 + return result + + def _is_directory(self, path): + path = path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + return result + + +_finder_registry = { + type(None): ResourceFinder, + zipimport.zipimporter: ZipResourceFinder +} + +try: + # In Python 3.6, _frozen_importlib -> _frozen_importlib_external + try: + import _frozen_importlib_external as _fi + except ImportError: + import _frozen_importlib as _fi + _finder_registry[_fi.SourceFileLoader] = ResourceFinder + _finder_registry[_fi.FileFinder] = ResourceFinder + # See issue #146 + _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder + del _fi +except (ImportError, AttributeError): + pass + + +def register_finder(loader, finder_maker): + _finder_registry[type(loader)] = finder_maker + + +_finder_cache = {} + + +def finder(package): + """ + Return a resource finder for a package. + :param package: The name of the package. + :return: A :class:`ResourceFinder` instance for the package. 
+ """ + if package in _finder_cache: + result = _finder_cache[package] + else: + if package not in sys.modules: + __import__(package) + module = sys.modules[package] + path = getattr(module, '__path__', None) + if path is None: + raise DistlibException('You cannot get a finder for a module, ' + 'only for a package') + loader = getattr(module, '__loader__', None) + finder_maker = _finder_registry.get(type(loader)) + if finder_maker is None: + raise DistlibException('Unable to locate finder for %r' % package) + result = finder_maker(module) + _finder_cache[package] = result + return result + + +_dummy_module = types.ModuleType(str('__dummy__')) + + +def finder_for_path(path): + """ + Return a resource finder for a path, which should represent a container. + + :param path: The path. + :return: A :class:`ResourceFinder` instance for the path. + """ + result = None + # calls any path hooks, gets importer into cache + pkgutil.get_importer(path) + loader = sys.path_importer_cache.get(path) + finder = _finder_registry.get(type(loader)) + if finder: + module = _dummy_module + module.__file__ = os.path.join(path, '') + module.__loader__ = loader + result = finder(module) + return result diff --git a/.venv/Lib/site-packages/distlib/scripts.py b/.venv/Lib/site-packages/distlib/scripts.py new file mode 100644 index 00000000..b1fc705b --- /dev/null +++ b/.venv/Lib/site-packages/distlib/scripts.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys +import time +from zipfile import ZipInfo + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, get_executable, get_platform, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' + + + + + + + + + + + + +'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +from %(module)s import %(import_name)s +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +''' + +# Pre-fetch the contents of all executable wrapper stubs. +# This is to address https://github.com/pypa/pip/issues/12666. +# When updating pip, we rename the old pip in place before installing the +# new version. If we try to fetch a wrapper *after* that rename, the finder +# machinery will be confused as the package is no longer available at the +# location where it was imported from. So we load everything into memory in +# advance. 
+ +if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + DISTLIB_PACKAGE = __name__.rsplit('.', 1)[0] + + WRAPPERS = { + r.name: r.bytes + for r in finder(DISTLIB_PACKAGE).iterator("") + if r.name.endswith(".exe") + } + + +def enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + + +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable + + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. + """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, source_dir, target_dir, add_launchers=True, dry_run=False, fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. + self.set_mode = (os.name == 'posix') or (os.name == 'java' and os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or (os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' + except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. + import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _build_shebang(self, executable, post_interp): + """ + Build a shebang line. In the simple case (on Windows, or a shebang line + which is not too long or contains spaces) use a simple formulation for + the shebang. Otherwise, use /bin/sh as the executable, with a contrived + shebang which allows the script to run either under Python or sh, using + suitable quoting. Thanks to Harald Nordgren for his input. 
+ + See also: http://www.in-ulm.de/~mascheck/various/shebang/#length + https://hg.mozilla.org/mozilla-central/file/tip/mach + """ + if os.name != 'posix': + simple_shebang = True + elif getattr(sys, "cross_compiling", False): + # In a cross-compiling environment, the shebang will likely be a + # script; this *must* be invoked with the "safe" version of the + # shebang, or else using os.exec() to run the entry script will + # fail, raising "OSError 8 [Errno 8] Exec format error". + simple_shebang = False + else: + # Add 3 for '#!' prefix and newline suffix. + shebang_length = len(executable) + len(post_interp) + 3 + if sys.platform == 'darwin': + max_shebang_length = 512 + else: + max_shebang_length = 127 + simple_shebang = ((b' ' not in executable) and (shebang_length <= max_shebang_length)) + + if simple_shebang: + result = b'#!' + executable + post_interp + b'\n' + else: + result = b'#!/bin/sh\n' + result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' + result += b"' '''\n" + return result + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + if os.name == 'nt': + # for Python builds from source on Windows, no Python executables with + # a version suffix are created, so we use python.exe + executable = os.path.join(sysconfig.get_config_var('BINDIR'), + 'python%s' % (sysconfig.get_config_var('EXE'))) + else: + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. + executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp and + '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = self._build_shebang(executable, post_interp) + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. 
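A standalone sketch (not distlib code) of the decision just described: emit a plain `#!` line when the interpreter path is short and space-free, otherwise fall back to the `/bin/sh` trampoline that re-execs the interpreter.

```python
import sys

def build_shebang(executable: bytes, post_interp: bytes = b'') -> bytes:
    # 3 accounts for the '#!' prefix and the trailing newline
    shebang_length = len(executable) + len(post_interp) + 3
    max_length = 512 if sys.platform == 'darwin' else 127
    if b' ' not in executable and shebang_length <= max_length:
        return b'#!' + executable + post_interp + b'\n'
    result = b'#!/bin/sh\n'
    result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
    result += b"' '''\n"
    return result

print(build_shebang(b'/usr/bin/python3'))
```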
+ try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError('The shebang (%r) is not decodable from utf-8' % shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. + if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError('The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict( + module=entry.prefix, import_name=entry.suffix.split('.')[0], func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + if not use_launcher: + script_bytes = shebang + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH') + if source_date_epoch: + date_time = time.gmtime(int(source_date_epoch))[:6] + zinfo = ZipInfo(filename='__main__.py', date_time=date_time) + zf.writestr(zinfo, script_bytes) + else: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. + logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith('.' 
+ ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + variant_separator = '-' + + def get_script_filenames(self, name): + result = set() + if '' in self.variants: + result.add(name) + if 'X' in self.variants: + result.add('%s%s' % (name, self.version_info[0])) + if 'X.Y' in self.variants: + result.add('%s%s%s.%s' % (name, self.variant_separator, self.version_info[0], self.version_info[1])) + return result + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + scriptnames = self.get_script_filenames(entry.name) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s is an empty file (skipping)', script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + # Executable launcher support. + # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ + + def _get_launcher(self, kind): + if struct.calcsize('P') == 8: # 64-bit + bits = '64' + else: + bits = '32' + platform_suffix = '-arm' if get_platform() == 'win-arm64' else '' + name = '%s%s%s.exe' % (kind, bits, platform_suffix) + if name not in WRAPPERS: + msg = ('Unable to find resource %s in package %s' % + (name, DISTLIB_PACKAGE)) + raise ValueError(msg) + return WRAPPERS[name] + + # Public API follows + + def make(self, specification, options=None): + """ + Make a script. 
+ + :param specification: The specification, which is either a valid export + entry specification (to make a script from a + callable) or a filename (to make a script by + copying from a source location). + :param options: A dictionary of options controlling script generation. + :return: A list of all absolute pathnames written to. + """ + filenames = [] + entry = get_export_entry(specification) + if entry is None: + self._copy_script(specification, filenames) + else: + self._make_script(entry, filenames, options=options) + return filenames + + def make_multiple(self, specifications, options=None): + """ + Take a list of specifications and make scripts from them, + :param specifications: A list of specifications. + :return: A list of all absolute pathnames written to, + """ + filenames = [] + for specification in specifications: + filenames.extend(self.make(specification, options)) + return filenames diff --git a/.venv/Lib/site-packages/distlib/t32.exe b/.venv/Lib/site-packages/distlib/t32.exe new file mode 100644 index 00000000..52154f0b Binary files /dev/null and b/.venv/Lib/site-packages/distlib/t32.exe differ diff --git a/.venv/Lib/site-packages/distlib/t64-arm.exe b/.venv/Lib/site-packages/distlib/t64-arm.exe new file mode 100644 index 00000000..e1ab8f8f Binary files /dev/null and b/.venv/Lib/site-packages/distlib/t64-arm.exe differ diff --git a/.venv/Lib/site-packages/distlib/t64.exe b/.venv/Lib/site-packages/distlib/t64.exe new file mode 100644 index 00000000..e8bebdba Binary files /dev/null and b/.venv/Lib/site-packages/distlib/t64.exe differ diff --git a/.venv/Lib/site-packages/distlib/util.py b/.venv/Lib/site-packages/distlib/util.py new file mode 100644 index 00000000..0d5bd7a8 --- /dev/null +++ b/.venv/Lib/site-packages/distlib/util.py @@ -0,0 +1,1984 @@ +# +# Copyright (C) 2012-2023 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import codecs +from collections import deque +import contextlib +import csv +from glob import iglob as std_iglob +import io +import json +import logging +import os +import py_compile +import re +import socket +try: + import ssl +except ImportError: # pragma: no cover + ssl = None +import subprocess +import sys +import tarfile +import tempfile +import textwrap + +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import time + +from . import DistlibException +from .compat import (string_types, text_type, shutil, raw_input, StringIO, cache_from_source, urlopen, urljoin, httplib, + xmlrpclib, HTTPHandler, BaseConfigurator, valid_ident, Container, configparser, URLError, ZipFile, + fsdecode, unquote, urlparse) + +logger = logging.getLogger(__name__) + +# +# Requirement parsing code as per PEP 508 +# + +IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') +VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') +COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') +MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') +OR = re.compile(r'^or\b\s*') +AND = re.compile(r'^and\b\s*') +NON_SPACE = re.compile(r'(\S+)\s*') +STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') + + +def parse_marker(marker_string): + """ + Parse a marker string and return a dictionary containing a marker expression. + + The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in + the expression grammar, or strings. A string contained in quotes is to be + interpreted as a literal string, and a string not contained in quotes is a + variable (such as os_name). 
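For example, the structure described above looks like this (assuming distlib is importable):

```python
from distlib.util import parse_marker

tree, rest = parse_marker('python_version >= "3.9" and os_name == "posix"')
print(rest)   # '' - the whole string was consumed
print(tree)
# {'op': 'and',
#  'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.9"'},
#  'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}}
```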
+ """ + + def marker_var(remaining): + # either identifier, or literal string + m = IDENTIFIER.match(remaining) + if m: + result = m.groups()[0] + remaining = remaining[m.end():] + elif not remaining: + raise SyntaxError('unexpected end of input') + else: + q = remaining[0] + if q not in '\'"': + raise SyntaxError('invalid expression: %s' % remaining) + oq = '\'"'.replace(q, '') + remaining = remaining[1:] + parts = [q] + while remaining: + # either a string chunk, or oq, or q to terminate + if remaining[0] == q: + break + elif remaining[0] == oq: + parts.append(oq) + remaining = remaining[1:] + else: + m = STRING_CHUNK.match(remaining) + if not m: + raise SyntaxError('error in string literal: %s' % remaining) + parts.append(m.groups()[0]) + remaining = remaining[m.end():] + else: + s = ''.join(parts) + raise SyntaxError('unterminated string: %s' % s) + parts.append(q) + result = ''.join(parts) + remaining = remaining[1:].lstrip() # skip past closing quote + return result, remaining + + def marker_expr(remaining): + if remaining and remaining[0] == '(': + result, remaining = marker(remaining[1:].lstrip()) + if remaining[0] != ')': + raise SyntaxError('unterminated parenthesis: %s' % remaining) + remaining = remaining[1:].lstrip() + else: + lhs, remaining = marker_var(remaining) + while remaining: + m = MARKER_OP.match(remaining) + if not m: + break + op = m.groups()[0] + remaining = remaining[m.end():] + rhs, remaining = marker_var(remaining) + lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + result = lhs + return result, remaining + + def marker_and(remaining): + lhs, remaining = marker_expr(remaining) + while remaining: + m = AND.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_expr(remaining) + lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + def marker(remaining): + lhs, remaining = marker_and(remaining) + while remaining: + m = OR.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_and(remaining) + lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + return marker(marker_string) + + +def parse_requirement(req): + """ + Parse a requirement passed in as a string. Return a Container + whose attributes contain the various parts of the requirement. + """ + remaining = req.strip() + if not remaining or remaining.startswith('#'): + return None + m = IDENTIFIER.match(remaining) + if not m: + raise SyntaxError('name expected: %s' % remaining) + distname = m.groups()[0] + remaining = remaining[m.end():] + extras = mark_expr = versions = uri = None + if remaining and remaining[0] == '[': + i = remaining.find(']', 1) + if i < 0: + raise SyntaxError('unterminated extra: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + extras = [] + while s: + m = IDENTIFIER.match(s) + if not m: + raise SyntaxError('malformed extra: %s' % s) + extras.append(m.groups()[0]) + s = s[m.end():] + if not s: + break + if s[0] != ',': + raise SyntaxError('comma expected in extras: %s' % s) + s = s[1:].lstrip() + if not extras: + extras = None + if remaining: + if remaining[0] == '@': + # it's a URI + remaining = remaining[1:].lstrip() + m = NON_SPACE.match(remaining) + if not m: + raise SyntaxError('invalid URI: %s' % remaining) + uri = m.groups()[0] + t = urlparse(uri) + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. 
Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not (t.scheme and t.netloc): + raise SyntaxError('Invalid URL: %s' % uri) + remaining = remaining[m.end():].lstrip() + else: + + def get_versions(ver_remaining): + """ + Return a list of operator, version tuples if any are + specified, else None. + """ + m = COMPARE_OP.match(ver_remaining) + versions = None + if m: + versions = [] + while True: + op = m.groups()[0] + ver_remaining = ver_remaining[m.end():] + m = VERSION_IDENTIFIER.match(ver_remaining) + if not m: + raise SyntaxError('invalid version: %s' % ver_remaining) + v = m.groups()[0] + versions.append((op, v)) + ver_remaining = ver_remaining[m.end():] + if not ver_remaining or ver_remaining[0] != ',': + break + ver_remaining = ver_remaining[1:].lstrip() + # Some packages have a trailing comma which would break things + # See issue #148 + if not ver_remaining: + break + m = COMPARE_OP.match(ver_remaining) + if not m: + raise SyntaxError('invalid constraint: %s' % ver_remaining) + if not versions: + versions = None + return versions, ver_remaining + + if remaining[0] != '(': + versions, remaining = get_versions(remaining) + else: + i = remaining.find(')', 1) + if i < 0: + raise SyntaxError('unterminated parenthesis: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + # As a special diversion from PEP 508, allow a version number + # a.b.c in parentheses as a synonym for ~= a.b.c (because this + # is allowed in earlier PEPs) + if COMPARE_OP.match(s): + versions, _ = get_versions(s) + else: + m = VERSION_IDENTIFIER.match(s) + if not m: + raise SyntaxError('invalid constraint: %s' % s) + v = m.groups()[0] + s = s[m.end():].lstrip() + if s: + raise SyntaxError('invalid constraint: %s' % s) + versions = [('~=', v)] + + if remaining: + if remaining[0] != ';': + raise SyntaxError('invalid requirement: %s' % remaining) + remaining = remaining[1:].lstrip() + + mark_expr, remaining = parse_marker(remaining) + + if remaining and remaining[0] != '#': + raise SyntaxError('unexpected trailing data: %s' % remaining) + + if not versions: + rs = distname + else: + rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) + return Container(name=distname, extras=extras, constraints=versions, marker=mark_expr, url=uri, requirement=rs) + + +def get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + + def get_rel_path(root, path): + # normalizes and returns a lstripped-/-separated path + root = root.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(root) + return path[len(root):].lstrip('/') + + destinations = {} + for base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = get_rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): + # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as + 
# changes to the stub launcher mean that sys.executable always points + # to the stub on OS X + # if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' + # in os.environ): + # result = os.environ['__PYVENV_LAUNCHER__'] + # else: + # result = sys.executable + # return result + # Avoid normcasing: see issue #143 + # result = os.path.normcase(sys.executable) + result = sys.executable + if not isinstance(result, text_type): + result = fsdecode(result) + return result + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + # entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + cp.add_section(k) + for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, entry.name, s) + cp.write(stream) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + + +@contextlib.contextmanager +def chdir(d): + cwd = os.getcwd() + try: + os.chdir(d) + yield + finally: + os.chdir(cwd) + + +@contextlib.contextmanager +def socket_timeout(seconds=15): + cto = socket.getdefaulttimeout() + try: + socket.setdefaulttimeout(seconds) + yield + finally: + socket.setdefaulttimeout(cto) + + +class cached_property(object): + + def __init__(self, func): + self.func = func + # for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) + + def __get__(self, obj, cls=None): + if obj is None: + return self + value = self.func(obj) + object.__setattr__(obj, self.func.__name__, value) + # obj.__dict__[self.func.__name__] = value = self.func(obj) + return value + + +def convert_path(pathname): + """Return 'pathname' as a name that will 
work on the native filesystem. + + The path is split on '/' and put back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while os.curdir in paths: + paths.remove(os.curdir) + if not paths: + return os.curdir + return os.path.join(*paths) + + +class FileOperator(object): + + def __init__(self, dry_run=False): + self.dry_run = dry_run + self.ensured = set() + self._init_record() + + def _init_record(self): + self.record = False + self.files_written = set() + self.dirs_created = set() + + def record_as_written(self, path): + if self.record: + self.files_written.add(path) + + def newer(self, source, target): + """Tell if the target is newer than the source. + + Returns true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. + + Returns false if both exist and 'target' is the same age or younger + than 'source'. Raise PackagingFileError if 'source' does not exist. + + Note that this test is not very accurate: files created in the same + second will have the same "age". + """ + if not os.path.exists(source): + raise DistlibException("file '%r' does not exist" % os.path.abspath(source)) + if not os.path.exists(target): + return True + + return os.stat(source).st_mtime > os.stat(target).st_mtime + + def copy_file(self, infile, outfile, check=True): + """Copy a file respecting dry-run and force flags. + """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + if os.path.exists(path): + os.remove(path) + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.write_binary_file(path, data.encode(encoding)) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. 
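A sketch of the `FileOperator` workflow described above; the temporary paths are used only for illustration, and it assumes this vendored distlib is importable.

```python
import os
import tempfile
from distlib.util import FileOperator

src = os.path.join(tempfile.mkdtemp(), 'tool.py')
with open(src, 'w') as f:
    f.write('print("hi")\n')

fop = FileOperator(dry_run=False)
dest = os.path.join(tempfile.mkdtemp(), 'bin', 'tool.py')
if fop.newer(src, dest):             # True here: dest does not exist yet
    fop.copy_file(src, dest)         # ensure_dir() creates 'bin' first
if os.name == 'posix':
    fop.set_executable_mode([dest])  # set_mode(0o555, 0o7777, [dest]) under the hood
```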
+ for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + compile_kwargs = {} + if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): + if not isinstance(hashed_invalidation, py_compile.PycInvalidationMode): + hashed_invalidation = py_compile.PycInvalidationMode.CHECKED_HASH + compile_kwargs['invalidation_mode'] = hashed_invalidation + py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = os.path.dirname(path) + if parent == path: + break + path = parent + return result + + def commit(self): + """ + Commit recorded changes, turn off recording, return + changes. 
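The recording machinery that `commit()` and `rollback()` rely on can be exercised like this (a sketch, assuming distlib is importable):

```python
import os
import tempfile
from distlib.util import FileOperator

fop = FileOperator()
fop.record = True                    # start tracking written files / created dirs
target = os.path.join(tempfile.mkdtemp(), 'data.txt')
fop.write_binary_file(target, b'payload')
print(fop.files_written)             # {'.../data.txt'}
fop.rollback()                       # undo: removes the recorded file again
print(os.path.exists(target))        # False
```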
+ """ + assert self.record + result = self.files_written, self.dirs_created + self._init_record() + return result + + def rollback(self): + if not self.dry_run: + for f in list(self.files_written): + if os.path.exists(f): + os.remove(f) + # dirs should all be empty now, except perhaps for + # __pycache__ subdirs + # reverse so that subdirs appear before their parents + dirs = sorted(self.dirs_created, reverse=True) + for d in dirs: + flist = os.listdir(d) + if flist: + assert flist == ['__pycache__'] + sd = os.path.join(d, flist[0]) + os.rmdir(sd) + os.rmdir(d) # should fail if non-empty + self._init_record() + + +def resolve(module_name, dotted_path): + if module_name in sys.modules: + mod = sys.modules[module_name] + else: + mod = __import__(module_name) + if dotted_path is None: + result = mod + else: + parts = dotted_path.split('.') + result = getattr(mod, parts.pop(0)) + for p in parts: + result = getattr(result, p) + return result + + +class ExportEntry(object): + + def __init__(self, name, prefix, suffix, flags): + self.name = name + self.prefix = prefix + self.suffix = suffix + self.flags = flags + + @cached_property + def value(self): + return resolve(self.prefix, self.suffix) + + def __repr__(self): # pragma: no cover + return '' % (self.name, self.prefix, self.suffix, self.flags) + + def __eq__(self, other): + if not isinstance(other, ExportEntry): + result = False + else: + result = (self.name == other.name and self.prefix == other.prefix and self.suffix == other.suffix and + self.flags == other.flags) + return result + + __hash__ = object.__hash__ + + +ENTRY_RE = re.compile( + r'''(?P([^\[]\S*)) + \s*=\s*(?P(\w+)([:\.]\w+)*) + \s*(\[\s*(?P[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? + ''', re.VERBOSE) + + +def get_export_entry(specification): + m = ENTRY_RE.search(specification) + if not m: + result = None + if '[' in specification or ']' in specification: + raise DistlibException("Invalid specification " + "'%s'" % specification) + else: + d = m.groupdict() + name = d['name'] + path = d['callable'] + colons = path.count(':') + if colons == 0: + prefix, suffix = path, None + else: + if colons != 1: + raise DistlibException("Invalid specification " + "'%s'" % specification) + prefix, suffix = path.split(':') + flags = d['flags'] + if flags is None: + if '[' in specification or ']' in specification: + raise DistlibException("Invalid specification " + "'%s'" % specification) + flags = [] + else: + flags = [f.strip() for f in flags.split(',')] + result = ExportEntry(name, prefix, suffix, flags) + return result + + +def get_cache_base(suffix=None): + """ + Return the default base location for distlib caches. If the directory does + not exist, it is created. Use the suffix provided for the base directory, + and default to '.distlib' if it isn't provided. + + On Windows, if LOCALAPPDATA is defined in the environment, then it is + assumed to be a directory, and will be the parent directory of the result. + On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home + directory - using os.expanduser('~') - will be the parent directory of + the result. + + The result is just the directory '.distlib' in the parent directory as + determined above, or with the name specified with ``suffix``. 
+ """ + if suffix is None: + suffix = '.distlib' + if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: + result = os.path.expandvars('$localappdata') + else: + # Assume posix, or old Windows + result = os.path.expanduser('~') + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if os.path.isdir(result): + usable = os.access(result, os.W_OK) + if not usable: + logger.warning('Directory exists but is not writable: %s', result) + else: + try: + os.makedirs(result) + usable = True + except OSError: + logger.warning('Unable to create %s', result, exc_info=True) + usable = False + if not usable: + result = tempfile.mkdtemp() + logger.warning('Default location unusable, using %s', result) + return os.path.join(result, suffix) + + +def path_to_cache_dir(path, use_abspath=True): + """ + Convert an absolute path to a directory name for use in a cache. + + The algorithm used is: + + #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. + #. Any occurrence of ``os.sep`` is replaced with ``'--'``. + #. ``'.cache'`` is appended. + """ + d, p = os.path.splitdrive(os.path.abspath(path) if use_abspath else path) + if d: + d = d.replace(':', '---') + p = p.replace(os.sep, '--') + return d + p + '.cache' + + +def ensure_slash(s): + if not s.endswith('/'): + return s + '/' + return s + + +def parse_credentials(netloc): + username = password = None + if '@' in netloc: + prefix, netloc = netloc.rsplit('@', 1) + if ':' not in prefix: + username = prefix + else: + username, password = prefix.split(':', 1) + if username: + username = unquote(username) + if password: + password = unquote(password) + return username, password, netloc + + +def get_process_umask(): + result = os.umask(0o22) + os.umask(result) + return result + + +def is_string_sequence(seq): + result = True + i = None + for i, s in enumerate(seq): + if not isinstance(s, string_types): + result = False + break + assert i is not None + return result + + +PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) +PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') + + +def split_filename(filename, project_name=None): + """ + Extract name, version, python version from a filename (no extension) + + Return name, version, pyver or None + """ + result = None + pyver = None + filename = unquote(filename).replace(' ', '-') + m = PYTHON_VERSION.search(filename) + if m: + pyver = m.group(1) + filename = filename[:m.start()] + if project_name and len(filename) > len(project_name) + 1: + m = re.match(re.escape(project_name) + r'\b', filename) + if m: + n = m.end() + result = filename[:n], filename[n + 1:], pyver + if result is None: + m = PROJECT_NAME_AND_VERSION.match(filename) + if m: + result = m.group(1), m.group(3), pyver + return result + + +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' + r'\(\s*(?P[^\s)]+)\)$') + + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple. 
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result + + +# +# Extended metadata functionality +# + + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + ct = headers.get('Content-Type') + if not ct.startswith('application/json'): + logger.debug('Unexpected response for JSON request: %s', ct) + else: + reader = codecs.getreader('utf-8')(resp) + # data = reader.read().decode('utf-8') + # result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): # pragma: no cover + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix, use_abspath=True): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix, use_abspath=use_abspath) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). 
+ :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. + """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. + """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, args, kwargs, result) + return result + + +# +# Simple sequencing +# +class Sequencer(object): + + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of anything' % succ) + try: + preds.remove(pred) + succs.remove(succ) + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs or step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... 
but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node], lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node], index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: + break + component = tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', '.whl') + + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: # pragma: no cover + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. 
+ for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + + # Limit extraction of dangerous items, if this Python + # allows it easily. If not, just trust the input. + # See: https://docs.python.org/3/library/tarfile.html#extraction-filters + def extraction_filter(member, path): + """Run tarfile.tar_filter, but raise the expected ValueError""" + # This is only called if the current Python has tarfile filters + try: + return tarfile.tar_filter(member, path) + except tarfile.FilterError as exc: + raise ValueError(str(exc)) + + archive.extraction_filter = extraction_filter + + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G', 'T', 'P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? %' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' 
+ # elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + # import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + # import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' 
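+            # An empty prefix before '**' (i.e. the pattern starts with '**')
+            # means the recursive walk starts from the current directory.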
+ if radical == '': + radical = '*' + else: + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + for fn in _iglob(os.path.join(path, radical)): + yield fn + + +if ssl: + from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, CertificateError) + + # + # HTTPSConnection which verifies certificates/matches domains + # + + class HTTPSConnection(httplib.HTTPSConnection): + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None + + # noinspection PyPropertyAccess + def connect(self): + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, '_tunnel_host', False): + self.sock = sock + self._tunnel() + + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + if hasattr(ssl, 'OP_NO_SSLv2'): + context.options |= ssl.OP_NO_SSLv2 + if getattr(self, 'cert_file', None): + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + + self.sock = context.wrap_socket(sock, **kwargs) + if self.ca_certs and self.check_domain: + try: + match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) + except CertificateError: # pragma: no cover + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + class HTTPSHandler(BaseHTTPSHandler): + + def __init__(self, ca_certs, check_domain=True): + BaseHTTPSHandler.__init__(self) + self.ca_certs = ca_certs + self.check_domain = check_domain + + def _conn_maker(self, *args, **kwargs): + """ + This is called to create a connection instance. Normally you'd + pass a connection class to do_open, but it doesn't actually check for + a class, and just expects a callable. As long as we behave just as a + constructor would have, we should be OK. If it ever changes so that + we *must* pass a class, we'll create an UnsafeHTTPSConnection class + which just sets check_domain to False in the class definition, and + choose which one to pass to do_open. + """ + result = HTTPSConnection(*args, **kwargs) + if self.ca_certs: + result.ca_certs = self.ca_certs + result.check_domain = self.check_domain + return result + + def https_open(self, req): + try: + return self.do_open(self._conn_maker, req) + except URLError as e: + if 'certificate verify failed' in str(e.reason): + raise CertificateError('Unable to verify server certificate ' + 'for %s' % req.host) + else: + raise + + # + # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- + # Middle proxy using HTTP listens on port 443, or an index mistakenly serves + # HTML containing a http://xyz link when it should be https://xyz), + # you can use the following handler class, which does not allow HTTP traffic. + # + # It works by inheriting from HTTPHandler - so build_opener won't add a + # handler for HTTP itself. 
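+    # A hypothetical way to wire this up (illustrative only; it assumes the
+    # vendored package is importable as distlib, and the certificate bundle
+    # path is made up):
+    #
+    #     from distlib.util import HTTPSOnlyHandler
+    #     import urllib.request
+    #     opener = urllib.request.build_opener(HTTPSOnlyHandler('/path/to/cacert.pem'))
+    #     opener.open('https://pypi.org/simple/')
+    #
+    # Any plain http:// request made through such an opener raises URLError,
+    # courtesy of http_open below.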
+ # + class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + + def http_open(self, req): + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + + +# +# XML-RPC with timeouts +# +class Transport(xmlrpclib.Transport): + + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + return self._connection[1] + + +if ssl: + + class SafeTransport(xmlrpclib.SafeTransport): + + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, **kwargs) + return self._connection[1] + + +class ServerProxy(xmlrpclib.ServerProxy): + + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a timeout + # is specified + if timeout is not None: + # scheme = splittype(uri) # deprecated as of Python 3.8 + scheme = urlparse(uri)[0] + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. +# + + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + # Python 3 determines encoding from locale. 
Force 'utf-8' + # file encoding to match other forced utf-8 encoding + kwargs['encoding'] = 'utf-8' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + + +class CSVWriter(CSVBase): + + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + + +# +# Configurator functionality +# + + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + + +class SubprocessMixin(object): + """ + Mixin for running subprocesses and capturing their output + """ + + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p + + +def normalize_name(name): + """Normalize a python package name a la PEP 503""" + # https://www.python.org/dev/peps/pep-0503/#normalized-names + return re.sub('[-_.]+', '-', name).lower() + + +# def _get_pypirc_command(): +# """ +# Get the distutils command for interacting with PyPI configurations. +# :return: the command. +# """ +# from distutils.core import Distribution +# from distutils.config import PyPIRCCommand +# d = Distribution() +# return PyPIRCCommand(d) + + +class PyPIRCFile(object): + + DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' + DEFAULT_REALM = 'pypi' + + def __init__(self, fn=None, url=None): + if fn is None: + fn = os.path.join(os.path.expanduser('~'), '.pypirc') + self.filename = fn + self.url = url + + def read(self): + result = {} + + if os.path.exists(self.filename): + repository = self.url or self.DEFAULT_REPOSITORY + + config = configparser.RawConfigParser() + config.read(self.filename) + sections = config.sections() + if 'distutils' in sections: + # let's get the list of servers + index_servers = config.get('distutils', 'index-servers') + _servers = [server.strip() for server in index_servers.split('\n') if server.strip() != ''] + if _servers == []: + # nothing set, let's try to get the default pypi + if 'pypi' in sections: + _servers = ['pypi'] + else: + for server in _servers: + result = {'server': server} + result['username'] = config.get(server, 'username') + + # optional params + for key, default in (('repository', self.DEFAULT_REPOSITORY), ('realm', self.DEFAULT_REALM), + ('password', None)): + if config.has_option(server, key): + result[key] = config.get(server, key) + else: + result[key] = default + + # work around people having "repository" for the "pypi" + # section of their config set to the HTTP (rather than + # HTTPS) URL + if (server == 'pypi' and repository in (self.DEFAULT_REPOSITORY, 'pypi')): + result['repository'] = self.DEFAULT_REPOSITORY + elif (result['server'] != repository and result['repository'] != repository): + result = {} + elif 'server-login' in sections: + # old format + server = 'server-login' + if config.has_option(server, 'repository'): + repository = config.get(server, 'repository') + else: + repository = self.DEFAULT_REPOSITORY + result = { + 'username': config.get(server, 'username'), + 'password': config.get(server, 'password'), + 'repository': repository, + 'server': server, + 'realm': self.DEFAULT_REALM + } + return result + + def update(self, username, password): + # import pdb; pdb.set_trace() + config = configparser.RawConfigParser() + fn = self.filename + config.read(fn) + if not config.has_section('pypi'): + config.add_section('pypi') + config.set('pypi', 'username', username) + config.set('pypi', 'password', password) + with open(fn, 'w') as f: + config.write(f) 
+ + +def _load_pypirc(index): + """ + Read the PyPI access configuration as supported by distutils. + """ + return PyPIRCFile(url=index.url).read() + + +def _store_pypirc(index): + PyPIRCFile().update(index.username, index.password) + + +# +# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor +# tweaks +# + + +def get_host_platform(): + """Return a string that identifies the current platform. This is used mainly to + distinguish platform-specific build directories and platform-specific built + distributions. Typically includes the OS name and version and the + architecture (as supplied by 'os.uname()'), although the exact information + included depends on the OS; eg. on Linux, the kernel version isn't + particularly important. + + Examples of returned values: + linux-i586 + linux-alpha (?) + solaris-2.6-sun4u + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + + """ + if os.name == 'nt': + if 'amd64' in sys.version.lower(): + return 'win-amd64' + if '(arm)' in sys.version.lower(): + return 'win-arm32' + if '(arm64)' in sys.version.lower(): + return 'win-arm64' + return sys.platform + + # Set for cross builds explicitly + if "_PYTHON_HOST_PLATFORM" in os.environ: + return os.environ["_PYTHON_HOST_PLATFORM"] + + if os.name != 'posix' or not hasattr(os, 'uname'): + # XXX what about the architecture? NT is Intel or Alpha, + # Mac OS is M68k or PPC, etc. + return sys.platform + + # Try to distinguish various flavours of Unix + + (osname, host, release, version, machine) = os.uname() + + # Convert the OS name to lowercase, remove '/' characters, and translate + # spaces (for "Power Macintosh") + osname = osname.lower().replace('/', '') + machine = machine.replace(' ', '_').replace('/', '-') + + if osname[:5] == 'linux': + # At least on Linux/Intel, 'machine' is the processor -- + # i386, etc. + # XXX what about Alpha, SPARC, etc? + return "%s-%s" % (osname, machine) + + elif osname[:5] == 'sunos': + if release[0] >= '5': # SunOS 5 == Solaris 2 + osname = 'solaris' + release = '%d.%s' % (int(release[0]) - 3, release[2:]) + # We can't use 'platform.architecture()[0]' because a + # bootstrap problem. We use a dict to get an error + # if some suspicious happens. 
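+        # (sys.maxsize is 2**31 - 1 on a 32-bit build and 2**63 - 1 on a
+        # 64-bit build, hence the two keys below.)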
+ bitness = {2147483647: '32bit', 9223372036854775807: '64bit'} + machine += '.%s' % bitness[sys.maxsize] + # fall through to standard osname-release-machine representation + elif osname[:3] == 'aix': + from _aix_support import aix_platform + return aix_platform() + elif osname[:6] == 'cygwin': + osname = 'cygwin' + rel_re = re.compile(r'[\d.]+', re.ASCII) + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == 'darwin': + import _osx_support + try: + from distutils import sysconfig + except ImportError: + import sysconfig + osname, release, machine = _osx_support.get_platform_osx(sysconfig.get_config_vars(), osname, release, machine) + + return '%s-%s-%s' % (osname, release, machine) + + +_TARGET_TO_PLAT = { + 'x86': 'win32', + 'x64': 'win-amd64', + 'arm': 'win-arm32', +} + + +def get_platform(): + if os.name != 'nt': + return get_host_platform() + cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH') + if cross_compilation_target not in _TARGET_TO_PLAT: + return get_host_platform() + return _TARGET_TO_PLAT[cross_compilation_target] diff --git a/.venv/Lib/site-packages/distlib/version.py b/.venv/Lib/site-packages/distlib/version.py new file mode 100644 index 00000000..d70a96ef --- /dev/null +++ b/.venv/Lib/site-packages/distlib/version.py @@ -0,0 +1,750 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2023 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Implementation of a flexible versioning scheme providing support for PEP-440, +setuptools-compatible and semantic versioning. +""" + +import logging +import re + +from .compat import string_types +from .util import parse_requirement + +__all__ = ['NormalizedVersion', 'NormalizedMatcher', + 'LegacyVersion', 'LegacyMatcher', + 'SemanticVersion', 'SemanticMatcher', + 'UnsupportedVersionError', 'get_scheme'] + +logger = logging.getLogger(__name__) + + +class UnsupportedVersionError(ValueError): + """This is an unsupported version.""" + pass + + +class Version(object): + def __init__(self, s): + self._string = s = s.strip() + self._parts = parts = self.parse(s) + assert isinstance(parts, tuple) + assert len(parts) > 0 + + def parse(self, s): + raise NotImplementedError('please implement in a subclass') + + def _check_compatible(self, other): + if type(self) != type(other): + raise TypeError('cannot compare %r and %r' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + self._check_compatible(other) + return self._parts < other._parts + + def __gt__(self, other): + return not (self.__lt__(other) or self.__eq__(other)) + + def __le__(self, other): + return self.__lt__(other) or self.__eq__(other) + + def __ge__(self, other): + return self.__gt__(other) or self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self._parts) + + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + @property + def is_prerelease(self): + raise NotImplementedError('Please implement in subclasses.') + + +class Matcher(object): + version_class = None + + # value is either a callable or the name of a method + _operators = { + '<': lambda v, c, p: v < c, + '>': lambda v, c, p: v > c, + '<=': lambda v, c, p: v == c or v < c, + '>=': lambda v, c, p: v == c or v > c, + '==': lambda v, c, p: v == c, + '===': 
lambda v, c, p: v == c, + # by default, compatible => >=. + '~=': lambda v, c, p: v == c or v > c, + '!=': lambda v, c, p: v != c, + } + + # this is a method only to support alternative implementations + # via overriding + def parse_requirement(self, s): + return parse_requirement(s) + + def __init__(self, s): + if self.version_class is None: + raise ValueError('Please specify a version class') + self._string = s = s.strip() + r = self.parse_requirement(s) + if not r: + raise ValueError('Not valid: %r' % s) + self.name = r.name + self.key = self.name.lower() # for case-insensitive comparisons + clist = [] + if r.constraints: + # import pdb; pdb.set_trace() + for op, s in r.constraints: + if s.endswith('.*'): + if op not in ('==', '!='): + raise ValueError('\'.*\' not allowed for ' + '%r constraints' % op) + # Could be a partial version (e.g. for '2.*') which + # won't parse as a version, so keep it as a string + vn, prefix = s[:-2], True + # Just to check that vn is a valid version + self.version_class(vn) + else: + # Should parse as a version, so we can create an + # instance for the comparison + vn, prefix = self.version_class(s), False + clist.append((op, vn, prefix)) + self._parts = tuple(clist) + + def match(self, version): + """ + Check if the provided version matches the constraints. + + :param version: The version to match against this instance. + :type version: String or :class:`Version` instance. + """ + if isinstance(version, string_types): + version = self.version_class(version) + for operator, constraint, prefix in self._parts: + f = self._operators.get(operator) + if isinstance(f, string_types): + f = getattr(self, f) + if not f: + msg = ('%r not implemented ' + 'for %s' % (operator, self.__class__.__name__)) + raise NotImplementedError(msg) + if not f(version, constraint, prefix): + return False + return True + + @property + def exact_version(self): + result = None + if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): + result = self._parts[0][1] + return result + + def _check_compatible(self, other): + if type(self) != type(other) or self.name != other.name: + raise TypeError('cannot compare %s and %s' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self.key == other.key and self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self.key) + hash(self._parts) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|alpha|b|beta|c|rc|pre|preview)(\d+)?)?' + r'(\.(post|r|rev)(\d+)?)?([._-]?(dev)(\d+)?)?' 
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$', re.I) + + +def _pep_440_key(s): + s = s.strip() + m = PEP440_VERSION_RE.match(s) + if not m: + raise UnsupportedVersionError('Not a valid version: %s' % s) + groups = m.groups() + nums = tuple(int(v) for v in groups[1].split('.')) + while len(nums) > 1 and nums[-1] == 0: + nums = nums[:-1] + + if not groups[0]: + epoch = 0 + else: + epoch = int(groups[0][:-1]) + pre = groups[4:6] + post = groups[7:9] + dev = groups[10:12] + local = groups[13] + if pre == (None, None): + pre = () + else: + if pre[1] is None: + pre = pre[0], 0 + else: + pre = pre[0], int(pre[1]) + if post == (None, None): + post = () + else: + if post[1] is None: + post = post[0], 0 + else: + post = post[0], int(post[1]) + if dev == (None, None): + dev = () + else: + if dev[1] is None: + dev = dev[0], 0 + else: + dev = dev[0], int(dev[1]) + if local is None: + local = () + else: + parts = [] + for part in local.split('.'): + # to ensure that numeric compares as > lexicographic, avoid + # comparing them directly, but encode a tuple which ensures + # correct sorting + if part.isdigit(): + part = (1, int(part)) + else: + part = (0, part) + parts.append(part) + local = tuple(parts) + if not pre: + # either before pre-release, or final release and after + if not post and dev: + # before pre-release + pre = ('a', -1) # to sort before a0 + else: + pre = ('z',) # to sort after all pre-releases + # now look at the state of post and dev. + if not post: + post = ('_',) # sort before 'a' + if not dev: + dev = ('final',) + + return epoch, nums, pre, post, dev, local + + +_normalized_key = _pep_440_key + + +class NormalizedVersion(Version): + """A rational version. + + Good: + 1.2 # equivalent to "1.2.0" + 1.2.0 + 1.2a1 + 1.2.3a2 + 1.2.3b1 + 1.2.3c1 + 1.2.3.4 + TODO: fill this out + + Bad: + 1 # minimum two numbers + 1.2a # release level must have a release serial + 1.2.3b + """ + def parse(self, s): + result = _normalized_key(s) + # _normalized_key loses trailing zeroes in the release + # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 + # However, PEP 440 prefix matching needs it: for example, + # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). + m = PEP440_VERSION_RE.match(s) # must succeed + groups = m.groups() + self._release_clause = tuple(int(v) for v in groups[1].split('.')) + return result + + PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) + + @property + def is_prerelease(self): + return any(t[0] in self.PREREL_TAGS for t in self._parts if t) + + +def _match_prefix(x, y): + x = str(x) + y = str(y) + if x == y: + return True + if not x.startswith(y): + return False + n = len(y) + return x[n] == '.' + + +class NormalizedMatcher(Matcher): + version_class = NormalizedVersion + + # value is either a callable or the name of a method + _operators = { + '~=': '_match_compatible', + '<': '_match_lt', + '>': '_match_gt', + '<=': '_match_le', + '>=': '_match_ge', + '==': '_match_eq', + '===': '_match_arbitrary', + '!=': '_match_ne', + } + + def _adjust_local(self, version, constraint, prefix): + if prefix: + strip_local = '+' not in constraint and version._parts[-1] + else: + # both constraint and version are + # NormalizedVersion instances. + # If constraint does not have a local component, + # ensure the version doesn't, either. 
+ strip_local = not constraint._parts[-1] and version._parts[-1] + if strip_local: + s = version._string.split('+', 1)[0] + version = self.version_class(s) + return version, constraint + + def _match_lt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version >= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_gt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version <= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_le(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version <= constraint + + def _match_ge(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version >= constraint + + def _match_eq(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version == constraint) + else: + result = _match_prefix(version, constraint) + return result + + def _match_arbitrary(self, version, constraint, prefix): + return str(version) == str(constraint) + + def _match_ne(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version != constraint) + else: + result = not _match_prefix(version, constraint) + return result + + def _match_compatible(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version == constraint: + return True + if version < constraint: + return False +# if not prefix: +# return True + release_clause = constraint._release_clause + if len(release_clause) > 1: + release_clause = release_clause[:-1] + pfx = '.'.join([str(i) for i in release_clause]) + return _match_prefix(version, pfx) + + +_REPLACEMENTS = ( + (re.compile('[.+-]$'), ''), # remove trailing puncts + (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start + (re.compile('^[.-]'), ''), # remove leading puncts + (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses + (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha + (re.compile(r'\b(pre-alpha|prealpha)\b'), + 'pre.alpha'), # standardise + (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses +) + +_SUFFIX_REPLACEMENTS = ( + (re.compile('^[:~._+-]+'), ''), # remove leading puncts + (re.compile('[,*")([\\]]'), ''), # remove unwanted chars + (re.compile('[~:+_ -]'), '.'), # replace illegal chars + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\.$'), ''), # trailing '.' +) + +_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') + + +def _suggest_semantic_version(s): + """ + Try to suggest a semantic form for a version for which + _suggest_normalized_version couldn't come up with anything. + """ + result = s.strip().lower() + for pat, repl in _REPLACEMENTS: + result = pat.sub(repl, result) + if not result: + result = '0.0.0' + + # Now look for numeric prefix, and separate it out from + # the rest. 
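+    # For example (illustrative only): given '1.0 alpha 2', the numeric prefix
+    # '1.0' is padded to '1.0.0' and the remainder becomes the suffix, so the
+    # suggestion comes out as '1.0.0+alpha.2'.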
+ # import pdb; pdb.set_trace() + m = _NUMERIC_PREFIX.match(result) + if not m: + prefix = '0.0.0' + suffix = result + else: + prefix = m.groups()[0].split('.') + prefix = [int(i) for i in prefix] + while len(prefix) < 3: + prefix.append(0) + if len(prefix) == 3: + suffix = result[m.end():] + else: + suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] + prefix = prefix[:3] + prefix = '.'.join([str(i) for i in prefix]) + suffix = suffix.strip() + if suffix: + # import pdb; pdb.set_trace() + # massage the suffix. + for pat, repl in _SUFFIX_REPLACEMENTS: + suffix = pat.sub(repl, suffix) + + if not suffix: + result = prefix + else: + sep = '-' if 'dev' in suffix else '+' + result = prefix + sep + suffix + if not is_semver(result): + result = None + return result + + +def _suggest_normalized_version(s): + """Suggest a normalized version close to the given version string. + + If you have a version string that isn't rational (i.e. NormalizedVersion + doesn't like it) then you might be able to get an equivalent (or close) + rational version from this function. + + This does a number of simple normalizations to the given string, based + on observation of versions currently in use on PyPI. Given a dump of + those version during PyCon 2009, 4287 of them: + - 2312 (53.93%) match NormalizedVersion without change + with the automatic suggestion + - 3474 (81.04%) match when using this suggestion method + + @param s {str} An irrational version string. + @returns A rational version string, or None, if couldn't determine one. + """ + try: + _normalized_key(s) + return s # already rational + except UnsupportedVersionError: + pass + + rs = s.lower() + + # part of this could use maketrans + for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), + ('beta', 'b'), ('rc', 'c'), ('-final', ''), + ('-pre', 'c'), + ('-release', ''), ('.release', ''), ('-stable', ''), + ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), + ('final', '')): + rs = rs.replace(orig, repl) + + # if something ends with dev or pre, we add a 0 + rs = re.sub(r"pre$", r"pre0", rs) + rs = re.sub(r"dev$", r"dev0", rs) + + # if we have something like "b-2" or "a.2" at the end of the + # version, that is probably beta, alpha, etc + # let's remove the dash or dot + rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) + + # 1.0-dev-r371 -> 1.0.dev371 + # 0.1-dev-r79 -> 0.1.dev79 + rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) + + # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 + rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) + + # Clean: v0.3, v1.0 + if rs.startswith('v'): + rs = rs[1:] + + # Clean leading '0's on numbers. + # TODO: unintended side-effect on, e.g., "2003.05.09" + # PyPI stats: 77 (~2%) better + rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) + + # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers + # zero. 
+ # PyPI stats: 245 (7.56%) better + rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) + + # the 'dev-rNNN' tag is a dev tag + rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) + + # clean the - when used as a pre delimiter + rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) + + # a terminal "dev" or "devel" can be changed into ".dev0" + rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) + + # a terminal "dev" can be changed into ".dev0" + rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) + + # a terminal "final" or "stable" can be removed + rs = re.sub(r"(final|stable)$", "", rs) + + # The 'r' and the '-' tags are post release tags + # 0.4a1.r10 -> 0.4a1.post10 + # 0.9.33-17222 -> 0.9.33.post17222 + # 0.9.33-r17222 -> 0.9.33.post17222 + rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) + + # Clean 'r' instead of 'dev' usage: + # 0.9.33+r17222 -> 0.9.33.dev17222 + # 1.0dev123 -> 1.0.dev123 + # 1.0.git123 -> 1.0.dev123 + # 1.0.bzr123 -> 1.0.dev123 + # 0.1a0dev.123 -> 0.1a0.dev123 + # PyPI stats: ~150 (~4%) better + rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) + + # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: + # 0.2.pre1 -> 0.2c1 + # 0.2-c1 -> 0.2c1 + # 1.0preview123 -> 1.0c123 + # PyPI stats: ~21 (0.62%) better + rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) + + # Tcl/Tk uses "px" for their post release markers + rs = re.sub(r"p(\d+)$", r".post\1", rs) + + try: + _normalized_key(rs) + except UnsupportedVersionError: + rs = None + return rs + +# +# Legacy version processing (distribute-compatible) +# + + +_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) +_VERSION_REPLACE = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + '': None, + '.': None, +} + + +def _legacy_key(s): + def get_parts(s): + result = [] + for p in _VERSION_PART.split(s.lower()): + p = _VERSION_REPLACE.get(p, p) + if p: + if '0' <= p[:1] <= '9': + p = p.zfill(8) + else: + p = '*' + p + result.append(p) + result.append('*final') + return result + + result = [] + for p in get_parts(s): + if p.startswith('*'): + if p < '*final': + while result and result[-1] == '*final-': + result.pop() + while result and result[-1] == '00000000': + result.pop() + result.append(p) + return tuple(result) + + +class LegacyVersion(Version): + def parse(self, s): + return _legacy_key(s) + + @property + def is_prerelease(self): + result = False + for x in self._parts: + if (isinstance(x, string_types) and x.startswith('*') and x < '*final'): + result = True + break + return result + + +class LegacyMatcher(Matcher): + version_class = LegacyVersion + + _operators = dict(Matcher._operators) + _operators['~='] = '_match_compatible' + + numeric_re = re.compile(r'^(\d+(\.\d+)*)') + + def _match_compatible(self, version, constraint, prefix): + if version < constraint: + return False + m = self.numeric_re.match(str(constraint)) + if not m: + logger.warning('Cannot compute compatible match for version %s ' + ' and constraint %s', version, constraint) + return True + s = m.groups()[0] + if '.' in s: + s = s.rsplit('.', 1)[0] + return _match_prefix(version, s) + +# +# Semantic versioning +# + + +_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' + r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) + + +def is_semver(s): + return _SEMVER_RE.match(s) + + +def _semantic_key(s): + def make_tuple(s, absent): + if s is None: + result = (absent,) + else: + parts = s[1:].split('.') + # We can't compare ints and strings on Python 3, so fudge it + # by zero-filling numeric values so simulate a numeric comparison + result = tuple([p.zfill(8) if p.isdigit() else p for p in parts]) + return result + + m = is_semver(s) + if not m: + raise UnsupportedVersionError(s) + groups = m.groups() + major, minor, patch = [int(i) for i in groups[:3]] + # choose the '|' and '*' so that versions sort correctly + pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*') + return (major, minor, patch), pre, build + + +class SemanticVersion(Version): + def parse(self, s): + return _semantic_key(s) + + @property + def is_prerelease(self): + return self._parts[1][0] != '|' + + +class SemanticMatcher(Matcher): + version_class = SemanticVersion + + +class VersionScheme(object): + def __init__(self, key, matcher, suggester=None): + self.key = key + self.matcher = matcher + self.suggester = suggester + + def is_valid_version(self, s): + try: + self.matcher.version_class(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_matcher(self, s): + try: + self.matcher(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_constraint_list(self, s): + """ + Used for processing some metadata fields + """ + # See issue #140. Be tolerant of a single trailing comma. + if s.endswith(','): + s = s[:-1] + return self.is_valid_matcher('dummy_name (%s)' % s) + + def suggest(self, s): + if self.suggester is None: + result = None + else: + result = self.suggester(s) + return result + + +_SCHEMES = { + 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, + _suggest_normalized_version), + 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s), + 'semantic': VersionScheme(_semantic_key, SemanticMatcher, + _suggest_semantic_version), +} + +_SCHEMES['default'] = _SCHEMES['normalized'] + + +def get_scheme(name): + if name not in _SCHEMES: + raise ValueError('unknown scheme name: %r' % name) + return _SCHEMES[name] diff --git a/.venv/Lib/site-packages/distlib/w32.exe b/.venv/Lib/site-packages/distlib/w32.exe new file mode 100644 index 00000000..4ee2d3a3 Binary files /dev/null and b/.venv/Lib/site-packages/distlib/w32.exe differ diff --git a/.venv/Lib/site-packages/distlib/w64-arm.exe b/.venv/Lib/site-packages/distlib/w64-arm.exe new file mode 100644 index 00000000..951d5817 Binary files /dev/null and b/.venv/Lib/site-packages/distlib/w64-arm.exe differ diff --git a/.venv/Lib/site-packages/distlib/w64.exe b/.venv/Lib/site-packages/distlib/w64.exe new file mode 100644 index 00000000..5763076d Binary files /dev/null and b/.venv/Lib/site-packages/distlib/w64.exe differ diff --git a/.venv/Lib/site-packages/distlib/wheel.py b/.venv/Lib/site-packages/distlib/wheel.py new file mode 100644 index 00000000..62ab10fb --- /dev/null +++ b/.venv/Lib/site-packages/distlib/wheel.py @@ -0,0 +1,1100 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +from __future__ import unicode_literals + +import base64 +import codecs +import datetime +from email import message_from_file +import hashlib +import json +import logging +import os +import posixpath +import re +import shutil +import sys +import tempfile +import zipfile + +from . import __version__, DistlibException +from .compat import sysconfig, ZipFile, fsdecode, text_type, filter +from .database import InstalledDistribution +from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME +from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base, + read_exports, tempdir, get_platform) +from .version import NormalizedVersion, UnsupportedVersionError + +logger = logging.getLogger(__name__) + +cache = None # created when needed + +if hasattr(sys, 'pypy_version_info'): # pragma: no cover + IMP_PREFIX = 'pp' +elif sys.platform.startswith('java'): # pragma: no cover + IMP_PREFIX = 'jy' +elif sys.platform == 'cli': # pragma: no cover + IMP_PREFIX = 'ip' +else: + IMP_PREFIX = 'cp' + +VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') +if not VER_SUFFIX: # pragma: no cover + VER_SUFFIX = '%s%s' % sys.version_info[:2] +PYVER = 'py' + VER_SUFFIX +IMPVER = IMP_PREFIX + VER_SUFFIX + +ARCH = get_platform().replace('-', '_').replace('.', '_') + +ABI = sysconfig.get_config_var('SOABI') +if ABI and ABI.startswith('cpython-'): + ABI = ABI.replace('cpython-', 'cp').split('-')[0] +else: + + def _derive_abi(): + parts = ['cp', VER_SUFFIX] + if sysconfig.get_config_var('Py_DEBUG'): + parts.append('d') + if IMP_PREFIX == 'cp': + vi = sys.version_info[:2] + if vi < (3, 8): + wpm = sysconfig.get_config_var('WITH_PYMALLOC') + if wpm is None: + wpm = True + if wpm: + parts.append('m') + if vi < (3, 3): + us = sysconfig.get_config_var('Py_UNICODE_SIZE') + if us == 4 or (us is None and sys.maxunicode == 0x10FFFF): + parts.append('u') + return ''.join(parts) + + ABI = _derive_abi() + del _derive_abi + +FILENAME_RE = re.compile( + r''' +(?P[^-]+) +-(?P\d+[^-]*) +(-(?P\d+[^-]*))? 
+-(?P\w+\d+(\.\w+\d+)*) +-(?P\w+) +-(?P\w+(\.\w+)*) +\.whl$ +''', re.IGNORECASE | re.VERBOSE) + +NAME_VERSION_RE = re.compile(r''' +(?P[^-]+) +-(?P\d+[^-]*) +(-(?P\d+[^-]*))?$ +''', re.IGNORECASE | re.VERBOSE) + +SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') +SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') +SHEBANG_PYTHON = b'#!python' +SHEBANG_PYTHONW = b'#!pythonw' + +if os.sep == '/': + to_posix = lambda o: o +else: + to_posix = lambda o: o.replace(os.sep, '/') + +if sys.version_info[0] < 3: + import imp +else: + imp = None + import importlib.machinery + import importlib.util + + +def _get_suffixes(): + if imp: + return [s[0] for s in imp.get_suffixes()] + else: + return importlib.machinery.EXTENSION_SUFFIXES + + +def _load_dynamic(name, path): + # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly + if imp: + return imp.load_dynamic(name, path) + else: + spec = importlib.util.spec_from_file_location(name, path) + module = importlib.util.module_from_spec(spec) + sys.modules[name] = module + spec.loader.exec_module(module) + return module + + +class Mounter(object): + + def __init__(self): + self.impure_wheels = {} + self.libs = {} + + def add(self, pathname, extensions): + self.impure_wheels[pathname] = extensions + self.libs.update(extensions) + + def remove(self, pathname): + extensions = self.impure_wheels.pop(pathname) + for k, v in extensions: + if k in self.libs: + del self.libs[k] + + def find_module(self, fullname, path=None): + if fullname in self.libs: + result = self + else: + result = None + return result + + def load_module(self, fullname): + if fullname in sys.modules: + result = sys.modules[fullname] + else: + if fullname not in self.libs: + raise ImportError('unable to find extension for %s' % fullname) + result = _load_dynamic(fullname, self.libs[fullname]) + result.__loader__ = self + parts = fullname.rsplit('.', 1) + if len(parts) > 1: + result.__package__ = parts[0] + return result + + +_hook = Mounter() + + +class Wheel(object): + """ + Class to build and install from Wheel files (PEP 427). + """ + + wheel_version = (1, 1) + hash_kind = 'sha256' + + def __init__(self, filename=None, sign=False, verify=False): + """ + Initialise an instance using a (valid) filename. + """ + self.sign = sign + self.should_verify = verify + self.buildver = '' + self.pyver = [PYVER] + self.abi = ['none'] + self.arch = ['any'] + self.dirname = os.getcwd() + if filename is None: + self.name = 'dummy' + self.version = '0.1' + self._filename = self.filename + else: + m = NAME_VERSION_RE.match(filename) + if m: + info = m.groupdict('') + self.name = info['nm'] + # Reinstate the local version separator + self.version = info['vn'].replace('_', '-') + self.buildver = info['bn'] + self._filename = self.filename + else: + dirname, filename = os.path.split(filename) + m = FILENAME_RE.match(filename) + if not m: + raise DistlibException('Invalid name or ' + 'filename: %r' % filename) + if dirname: + self.dirname = os.path.abspath(dirname) + self._filename = filename + info = m.groupdict('') + self.name = info['nm'] + self.version = info['vn'] + self.buildver = info['bn'] + self.pyver = info['py'].split('.') + self.abi = info['bi'].split('.') + self.arch = info['ar'].split('.') + + @property + def filename(self): + """ + Build and return a filename from the various components. 
+ """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver, abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + self.get_wheel_metadata(zf) + # wv = wheel_metadata['Wheel-Version'].split('.', 1) + # file_version = tuple([int(i) for i in wv]) + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] + result = None + for fn in fns: + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + if result: + break + except KeyError: + pass + if not result: + raise ValueError('Invalid wheel, because metadata is ' + 'missing: looked in %s' % ', '.join(fns)) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) + if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python + args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, archive_record_path): + records = list(records) # make a copy, as mutated + records.append((archive_record_path, '', '')) + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + # hasher = 
getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + ap = to_posix(os.path.join(info_dir, 'RECORD')) + self.write_record(records, p, ap) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. + path = libdir + distinfo = None + for root, dirs, files in os.walk(path): + if root == path: + # At the top level only, save distinfo for later + # and skip it for now + for i, dn in enumerate(dirs): + dn = fsdecode(dn) + if dn.endswith('.dist-info'): + distinfo = os.path.join(root, dn) + del dirs[i] + break + assert distinfo, '.dist-info directory expected, not found' + + for fn in files: + # comment out next suite to leave .pyc files in + if fsdecode(fn).endswith(('.pyc', '.pyo')): + continue + p = os.path.join(root, fn) + rp = to_posix(os.path.relpath(p, path)) + archive_paths.append((rp, p)) + + # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. + files = os.listdir(distinfo) + for fn in files: + if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): + p = fsdecode(os.path.join(distinfo, fn)) + ap = to_posix(os.path.join(info_dir, fn)) + archive_paths.append((ap, p)) + + wheel_metadata = [ + 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), + 'Generator: distlib %s' % __version__, + 'Root-Is-Purelib: %s' % is_pure, + ] + for pyver, abi, arch in self.tags: + wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) + p = os.path.join(distinfo, 'WHEEL') + with open(p, 'w') as f: + f.write('\n'.join(wheel_metadata)) + ap = to_posix(os.path.join(info_dir, 'WHEEL')) + archive_paths.append((ap, p)) + + # sort the entries by archive path. Not needed by any spec, but it + # keeps the archive listing and RECORD tidier than they would otherwise + # be. 
Use the number of path segments to keep directory entries together, + # and keep the dist-info stuff at the end. + def sorter(t): + ap = t[0] + n = ap.count('/') + if '.dist-info' in ap: + n += 10000 + return (n, ap) + + archive_paths = sorted(archive_paths, key=sorter) + + # Now, at last, RECORD. + # Paths in here are archive paths - nothing else makes sense. + self.write_records((distinfo, info_dir), libdir, archive_paths) + # Now, ready to build the zip file + pathname = os.path.join(self.dirname, self.filename) + self.build_zip(pathname, archive_paths) + return pathname + + def skip_entry(self, arcname): + """ + Determine whether an archive entry should be skipped when verifying + or installing. + """ + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + # We also skip directories, as they won't be in RECORD + # either. See: + # + # https://github.com/pypa/wheel/issues/294 + # https://github.com/pypa/wheel/issues/287 + # https://github.com/pypa/wheel/pull/289 + # + return arcname.endswith(('/', '/RECORD.jws')) + + def install(self, paths, maker, **kwargs): + """ + Install a wheel to the specified paths. If kwarg ``warner`` is + specified, it should be a callable, which will be called with two + tuples indicating the wheel version of this software and the wheel + version in the file, if there is a discrepancy in the versions. + This can be used to issue any warnings to raise any exceptions. + If kwarg ``lib_only`` is True, only the purelib/platlib files are + installed, and the headers, scripts, data and dist-info metadata are + not written. If kwarg ``bytecode_hashed_invalidation`` is True, written + bytecode will try to use file-hash based invalidation (PEP-552) on + supported interpreter versions (CPython 3.7+). + + The return value is a :class:`InstalledDistribution` instance unless + ``options.lib_only`` is True, in which case the return value is ``None``. + """ + + dry_run = maker.dry_run + warner = kwargs.get('warner') + lib_only = kwargs.get('lib_only', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if (file_version != self.wheel_version) and warner: + warner(self.wheel_version, file_version) + + if message['Root-Is-Purelib'] == 'true': + libdir = paths['purelib'] + else: + libdir = paths['platlib'] + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') + script_pfx = posixpath.join(data_dir, 'scripts', '') + + # make a new instance rather than a copy of maker's, + # as we mutate it + fileop = FileOperator(dry_run=dry_run) + fileop.record = True # so we can rollback if needed + + bc = not sys.dont_write_bytecode # Double negatives. Lovely! 
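The `install()` docstring above describes its inputs (a `paths` mapping and a script `maker`) without showing a call. The following is a minimal, hedged usage sketch, not part of distlib itself: the wheel filename and target directories are placeholders, and `ScriptMaker` is distlib's own script generator (any object exposing the same `dry_run`, `source_dir`, `target_dir` and `make()` interface would satisfy the code above).

```python
# Hedged usage sketch for Wheel.install() -- filenames and directories are
# placeholders, not a recommended layout.
from distlib.scripts import ScriptMaker
from distlib.wheel import Wheel

wheel = Wheel('example_pkg-1.0-py3-none-any.whl')   # hypothetical wheel file

paths = {
    'prefix':  '/tmp/target',
    'purelib': '/tmp/target/lib/site-packages',
    'platlib': '/tmp/target/lib/site-packages',
    'scripts': '/tmp/target/bin',
    'headers': '/tmp/target/include',
    'data':    '/tmp/target/data',
}

# install() assigns source_dir/target_dir itself, so they can start as None.
maker = ScriptMaker(None, None)

dist = wheel.install(
    paths,
    maker,
    warner=lambda ours, theirs: None,  # invoked only on a Wheel-Version mismatch
)
print(dist)  # InstalledDistribution backed by the written .dist-info
```

Note that, as implemented above, `install()` re-checks each RECORD digest after writing, so a corrupted archive fails loudly rather than leaving bad files behind.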
+ + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. + if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + # Issue #147: permission bits aren't preserved. Using + # zf.extract(zinfo, libdir) should have worked, but didn't, + # see https://www.thetopsites.net/article/53834422.shtml + # So ... manually preserve permission bits as given in zinfo + if os.name == 'posix': + # just set the normal permission bits + os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile(outfile, hashed_invalidation=bc_hashed_invalidation) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. 
+ commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' [%s]' % ','.join(v.flags) + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True} + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') + fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if cache is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), '%s.%s' % sys.version_info[:2]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(self.filename, use_abspath=False) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp(file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. 
+ """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' % pathname + raise DistlibException(msg) + if pathname in sys.path: + logger.debug('%s already in path', pathname) + else: + if append: + sys.path.append(pathname) + else: + sys.path.insert(0, pathname) + extensions = self._get_extensions() + if extensions: + if _hook not in sys.meta_path: + sys.meta_path.append(_hook) + _hook.add(pathname, extensions) + + def unmount(self): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if pathname not in sys.path: + logger.debug('%s not in path', pathname) + else: + sys.path.remove(pathname) + if pathname in _hook.impure_wheels: + _hook.remove(pathname) + if not _hook.impure_wheels: + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + def verify(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + # data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + # metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message_from_file(wf) + # wv = message['Wheel-Version'].split('.', 1) + # file_version = tuple([int(i) for i in wv]) + # TODO version verification + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + # See issue #115: some wheels have .. in their entries, but + # in the filename ... e.g. __main__..py ! So the check is + # updated to look for .. in the directory portions + p = u_arcname.split('/') + if '..' in p: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + def update(self, modifier, dest_dir=None, **kwargs): + """ + Update the contents of a wheel in a generic way. The modifier should + be a callable which expects a dictionary argument: its keys are + archive-entry paths, and its values are absolute filesystem paths + where the contents the corresponding archive entries can be found. The + modifier is free to change the contents of the files pointed to, add + new entries and remove entries, before returning. This method will + extract the entire contents of the wheel to a temporary location, call + the modifier, and then use the passed (and possibly updated) + dictionary to write a new wheel. 
If ``dest_dir`` is specified, the new + wheel is written there -- otherwise, the original wheel is overwritten. + + The modifier should return True if it updated the wheel, else False. + This method returns the same value the modifier returns. + """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], '.'.join(str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = path.endswith(LEGACY_METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. + current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. + update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', prefix='wheel-update-', dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + + +def _get_glibc_version(): + import platform + ver = platform.libc_ver() + result = [] + if ver[0] == 'glibc': + for s in ver[1].split('.'): + result.append(int(s) if s.isdigit() else 0) + result = tuple(result) + return result + + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. 
+ """ + class _Version: + def __init__(self, major, minor): + self.major = major + self.major_minor = (major, minor) + self.string = ''.join((str(major), str(minor))) + + def __str__(self): + return self.string + + + versions = [ + _Version(sys.version_info.major, minor_version) + for minor_version in range(sys.version_info.minor, -1, -1) + ] + abis = [] + for suffix in _get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python version, ABI and arch + for i, version_object in enumerate(versions): + version = str(version_object) + add_abis = [] + + if i == 0: + add_abis = abis + + if IMP_PREFIX == 'cp' and version_object.major_minor >= (3, 2): + limited_api_abi = 'abi' + str(version_object.major) + if limited_api_abi not in add_abis: + add_abis.append(limited_api_abi) + + for abi in add_abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, version)), abi, arch)) + # manylinux + if abi != 'none' and sys.platform.startswith('linux'): + arch = arch.replace('linux_', '') + parts = _get_glibc_version() + if len(parts) == 2: + if parts >= (2, 5): + result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux1_%s' % arch)) + if parts >= (2, 12): + result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2010_%s' % arch)) + if parts >= (2, 17): + result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2014_%s' % arch)) + result.append((''.join( + (IMP_PREFIX, version)), abi, 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch))) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version_object in enumerate(versions): + version = str(version_object) + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version_object in enumerate(versions): + version = str(version_object) + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/.venv/Lib/site-packages/filelock-3.18.0.dist-info/INSTALLER b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git 
a/.venv/Lib/site-packages/filelock-3.18.0.dist-info/METADATA b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/METADATA new file mode 100644 index 00000000..b640fa40 --- /dev/null +++ b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/METADATA @@ -0,0 +1,58 @@ +Metadata-Version: 2.4 +Name: filelock +Version: 3.18.0 +Summary: A platform independent file lock. +Project-URL: Documentation, https://py-filelock.readthedocs.io +Project-URL: Homepage, https://github.com/tox-dev/py-filelock +Project-URL: Source, https://github.com/tox-dev/py-filelock +Project-URL: Tracker, https://github.com/tox-dev/py-filelock/issues +Maintainer-email: Bernát Gábor +License-Expression: Unlicense +License-File: LICENSE +Keywords: application,cache,directory,log,user +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: The Unlicense (Unlicense) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: System +Requires-Python: >=3.9 +Provides-Extra: docs +Requires-Dist: furo>=2024.8.6; extra == 'docs' +Requires-Dist: sphinx-autodoc-typehints>=3; extra == 'docs' +Requires-Dist: sphinx>=8.1.3; extra == 'docs' +Provides-Extra: testing +Requires-Dist: covdefaults>=2.3; extra == 'testing' +Requires-Dist: coverage>=7.6.10; extra == 'testing' +Requires-Dist: diff-cover>=9.2.1; extra == 'testing' +Requires-Dist: pytest-asyncio>=0.25.2; extra == 'testing' +Requires-Dist: pytest-cov>=6; extra == 'testing' +Requires-Dist: pytest-mock>=3.14; extra == 'testing' +Requires-Dist: pytest-timeout>=2.3.1; extra == 'testing' +Requires-Dist: pytest>=8.3.4; extra == 'testing' +Requires-Dist: virtualenv>=20.28.1; extra == 'testing' +Provides-Extra: typing +Requires-Dist: typing-extensions>=4.12.2; (python_version < '3.11') and extra == 'typing' +Description-Content-Type: text/markdown + +# filelock + +[![PyPI](https://img.shields.io/pypi/v/filelock)](https://pypi.org/project/filelock/) +[![Supported Python +versions](https://img.shields.io/pypi/pyversions/filelock.svg)](https://pypi.org/project/filelock/) +[![Documentation +status](https://readthedocs.org/projects/py-filelock/badge/?version=latest)](https://py-filelock.readthedocs.io/en/latest/?badge=latest) +[![Code style: +black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Downloads](https://static.pepy.tech/badge/filelock/month)](https://pepy.tech/project/filelock) +[![check](https://github.com/tox-dev/py-filelock/actions/workflows/check.yaml/badge.svg)](https://github.com/tox-dev/py-filelock/actions/workflows/check.yaml) + +For more information checkout the [official documentation](https://py-filelock.readthedocs.io/en/latest/index.html). 
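The README above ends with a pointer to the documentation; since the rest of this diff vendors the implementation, here is a minimal, hedged usage sketch based only on the API visible below (`FileLock`, `Timeout`, and the `timeout=` argument). The file names are placeholders.

```python
# Minimal sketch: serialise writes to a shared file across processes.
from filelock import FileLock, Timeout

lock = FileLock("shared_resource.txt.lock", timeout=5)  # placeholder paths

try:
    with lock:  # blocks for up to 5 seconds, then raises Timeout
        with open("shared_resource.txt", "a") as f:
            f.write("exclusive write\n")
except Timeout:
    print("Another process holds the lock; giving up.")
```

As `_api.py` below shows, the lock is reentrant within a process: nested `acquire()` calls are counted, and the underlying file is only unlocked when the count returns to zero.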
diff --git a/.venv/Lib/site-packages/filelock-3.18.0.dist-info/RECORD b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/RECORD new file mode 100644 index 00000000..10eb4208 --- /dev/null +++ b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/RECORD @@ -0,0 +1,24 @@ +filelock-3.18.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +filelock-3.18.0.dist-info/METADATA,sha256=bMzrZMIFytIbgg_WaLomH79i_7KEx8ahX0IJBxbx1_I,2897 +filelock-3.18.0.dist-info/RECORD,, +filelock-3.18.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +filelock-3.18.0.dist-info/licenses/LICENSE,sha256=iNm062BXnBkew5HKBMFhMFctfu3EqG2qWL8oxuFMm80,1210 +filelock/__init__.py,sha256=_t_-OAGXo_qyPa9lNQ1YnzVYEvSW3I0onPqzpomsVVg,1769 +filelock/__pycache__/__init__.cpython-312.pyc,, +filelock/__pycache__/_api.cpython-312.pyc,, +filelock/__pycache__/_error.cpython-312.pyc,, +filelock/__pycache__/_soft.cpython-312.pyc,, +filelock/__pycache__/_unix.cpython-312.pyc,, +filelock/__pycache__/_util.cpython-312.pyc,, +filelock/__pycache__/_windows.cpython-312.pyc,, +filelock/__pycache__/asyncio.cpython-312.pyc,, +filelock/__pycache__/version.cpython-312.pyc,, +filelock/_api.py,sha256=2aATBeJ3-jtMj5OSm7EE539iNaTBsf13KXtcBMoi8oM,14545 +filelock/_error.py,sha256=-5jMcjTu60YAvAO1UbqDD1GIEjVkwr8xCFwDBtMeYDg,787 +filelock/_soft.py,sha256=haqtc_TB_KJbYv2a8iuEAclKuM4fMG1vTcp28sK919c,1711 +filelock/_unix.py,sha256=eGOs4gDgZ-5fGnJUz-OkJDeZkAMzgvYcD8hVD6XH7e4,2351 +filelock/_util.py,sha256=QHBoNFIYfbAThhotH3Q8E2acFc84wpG49-T-uu017ZE,1715 +filelock/_windows.py,sha256=8k4XIBl_zZVfGC2gz0kEr8DZBvpNa8wdU9qeM1YrBb8,2179 +filelock/asyncio.py,sha256=EZdJVkbMnZMuQwzuPN5IvXD0Ugzt__vOtrMP4-siVeU,12451 +filelock/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +filelock/version.py,sha256=D9gAiF9PGH4dQFjbe6VcXhU8kyCLpU7-c7_vfZP--Hc,513 diff --git a/.venv/Lib/site-packages/filelock-3.18.0.dist-info/WHEEL b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/WHEEL new file mode 100644 index 00000000..12228d41 --- /dev/null +++ b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/Lib/site-packages/filelock-3.18.0.dist-info/licenses/LICENSE b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/licenses/LICENSE new file mode 100644 index 00000000..cf1ab25d --- /dev/null +++ b/.venv/Lib/site-packages/filelock-3.18.0.dist-info/licenses/LICENSE @@ -0,0 +1,24 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to diff --git a/.venv/Lib/site-packages/filelock/__init__.py b/.venv/Lib/site-packages/filelock/__init__.py new file mode 100644 index 00000000..c9d8c5b8 --- /dev/null +++ b/.venv/Lib/site-packages/filelock/__init__.py @@ -0,0 +1,70 @@ +""" +A platform independent file lock that supports the with-statement. + +.. autodata:: filelock.__version__ + :no-value: + +""" + +from __future__ import annotations + +import sys +import warnings +from typing import TYPE_CHECKING + +from ._api import AcquireReturnProxy, BaseFileLock +from ._error import Timeout +from ._soft import SoftFileLock +from ._unix import UnixFileLock, has_fcntl +from ._windows import WindowsFileLock +from .asyncio import ( + AsyncAcquireReturnProxy, + AsyncSoftFileLock, + AsyncUnixFileLock, + AsyncWindowsFileLock, + BaseAsyncFileLock, +) +from .version import version + +#: version of the project as a string +__version__: str = version + + +if sys.platform == "win32": # pragma: win32 cover + _FileLock: type[BaseFileLock] = WindowsFileLock + _AsyncFileLock: type[BaseAsyncFileLock] = AsyncWindowsFileLock +else: # pragma: win32 no cover # noqa: PLR5501 + if has_fcntl: + _FileLock: type[BaseFileLock] = UnixFileLock + _AsyncFileLock: type[BaseAsyncFileLock] = AsyncUnixFileLock + else: + _FileLock = SoftFileLock + _AsyncFileLock = AsyncSoftFileLock + if warnings is not None: + warnings.warn("only soft file lock is available", stacklevel=2) + +if TYPE_CHECKING: + FileLock = SoftFileLock + AsyncFileLock = AsyncSoftFileLock +else: + #: Alias for the lock, which should be used for the current platform. + FileLock = _FileLock + AsyncFileLock = _AsyncFileLock + + +__all__ = [ + "AcquireReturnProxy", + "AsyncAcquireReturnProxy", + "AsyncFileLock", + "AsyncSoftFileLock", + "AsyncUnixFileLock", + "AsyncWindowsFileLock", + "BaseAsyncFileLock", + "BaseFileLock", + "FileLock", + "SoftFileLock", + "Timeout", + "UnixFileLock", + "WindowsFileLock", + "__version__", +] diff --git a/.venv/Lib/site-packages/filelock/_api.py b/.venv/Lib/site-packages/filelock/_api.py new file mode 100644 index 00000000..8fde69a0 --- /dev/null +++ b/.venv/Lib/site-packages/filelock/_api.py @@ -0,0 +1,403 @@ +from __future__ import annotations + +import contextlib +import inspect +import logging +import os +import time +import warnings +from abc import ABCMeta, abstractmethod +from dataclasses import dataclass +from threading import local +from typing import TYPE_CHECKING, Any, cast +from weakref import WeakValueDictionary + +from ._error import Timeout + +if TYPE_CHECKING: + import sys + from types import TracebackType + + if sys.version_info >= (3, 11): # pragma: no cover (py311+) + from typing import Self + else: # pragma: no cover ( None: + self.lock = lock + + def __enter__(self) -> BaseFileLock: + return self.lock + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + self.lock.release() + + +@dataclass +class FileLockContext: + """A dataclass which holds the context for a ``BaseFileLock`` object.""" + + # The context is held in a separate class to allow optional use of thread local storage via the + # ThreadLocalFileContext class. + + #: The path to the lock file. 
+ lock_file: str + + #: The default timeout value. + timeout: float + + #: The mode for the lock files + mode: int + + #: Whether the lock should be blocking or not + blocking: bool + + #: The file descriptor for the *_lock_file* as it is returned by the os.open() function, not None when lock held + lock_file_fd: int | None = None + + #: The lock counter is used for implementing the nested locking mechanism. + lock_counter: int = 0 # When the lock is acquired is increased and the lock is only released, when this value is 0 + + +class ThreadLocalFileContext(FileLockContext, local): + """A thread local version of the ``FileLockContext`` class.""" + + +class FileLockMeta(ABCMeta): + def __call__( # noqa: PLR0913 + cls, + lock_file: str | os.PathLike[str], + timeout: float = -1, + mode: int = 0o644, + thread_local: bool = True, # noqa: FBT001, FBT002 + *, + blocking: bool = True, + is_singleton: bool = False, + **kwargs: Any, # capture remaining kwargs for subclasses # noqa: ANN401 + ) -> BaseFileLock: + if is_singleton: + instance = cls._instances.get(str(lock_file)) # type: ignore[attr-defined] + if instance: + params_to_check = { + "thread_local": (thread_local, instance.is_thread_local()), + "timeout": (timeout, instance.timeout), + "mode": (mode, instance.mode), + "blocking": (blocking, instance.blocking), + } + + non_matching_params = { + name: (passed_param, set_param) + for name, (passed_param, set_param) in params_to_check.items() + if passed_param != set_param + } + if not non_matching_params: + return cast("BaseFileLock", instance) + + # parameters do not match; raise error + msg = "Singleton lock instances cannot be initialized with differing arguments" + msg += "\nNon-matching arguments: " + for param_name, (passed_param, set_param) in non_matching_params.items(): + msg += f"\n\t{param_name} (existing lock has {set_param} but {passed_param} was passed)" + raise ValueError(msg) + + # Workaround to make `__init__`'s params optional in subclasses + # E.g. virtualenv changes the signature of the `__init__` method in the `BaseFileLock` class descendant + # (https://github.com/tox-dev/filelock/pull/340) + + all_params = { + "timeout": timeout, + "mode": mode, + "thread_local": thread_local, + "blocking": blocking, + "is_singleton": is_singleton, + **kwargs, + } + + present_params = inspect.signature(cls.__init__).parameters # type: ignore[misc] + init_params = {key: value for key, value in all_params.items() if key in present_params} + + instance = super().__call__(lock_file, **init_params) + + if is_singleton: + cls._instances[str(lock_file)] = instance # type: ignore[attr-defined] + + return cast("BaseFileLock", instance) + + +class BaseFileLock(contextlib.ContextDecorator, metaclass=FileLockMeta): + """Abstract base class for a file lock object.""" + + _instances: WeakValueDictionary[str, BaseFileLock] + + def __init_subclass__(cls, **kwargs: dict[str, Any]) -> None: + """Setup unique state for lock subclasses.""" + super().__init_subclass__(**kwargs) + cls._instances = WeakValueDictionary() + + def __init__( # noqa: PLR0913 + self, + lock_file: str | os.PathLike[str], + timeout: float = -1, + mode: int = 0o644, + thread_local: bool = True, # noqa: FBT001, FBT002 + *, + blocking: bool = True, + is_singleton: bool = False, + ) -> None: + """ + Create a new lock object. + + :param lock_file: path to the file + :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \ + the acquire method, if no timeout value (``None``) is given. 
If you want to disable the timeout, set it \ + to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock. + :param mode: file permissions for the lockfile + :param thread_local: Whether this object's internal context should be thread local or not. If this is set to \ + ``False`` then the lock will be reentrant across threads. + :param blocking: whether the lock should be blocking or not + :param is_singleton: If this is set to ``True`` then only one instance of this class will be created \ + per lock file. This is useful if you want to use the lock object for reentrant locking without needing \ + to pass the same object around. + + """ + self._is_thread_local = thread_local + self._is_singleton = is_singleton + + # Create the context. Note that external code should not work with the context directly and should instead use + # properties of this class. + kwargs: dict[str, Any] = { + "lock_file": os.fspath(lock_file), + "timeout": timeout, + "mode": mode, + "blocking": blocking, + } + self._context: FileLockContext = (ThreadLocalFileContext if thread_local else FileLockContext)(**kwargs) + + def is_thread_local(self) -> bool: + """:return: a flag indicating if this lock is thread local or not""" + return self._is_thread_local + + @property + def is_singleton(self) -> bool: + """:return: a flag indicating if this lock is singleton or not""" + return self._is_singleton + + @property + def lock_file(self) -> str: + """:return: path to the lock file""" + return self._context.lock_file + + @property + def timeout(self) -> float: + """ + :return: the default timeout value, in seconds + + .. versionadded:: 2.0.0 + """ + return self._context.timeout + + @timeout.setter + def timeout(self, value: float | str) -> None: + """ + Change the default timeout value. + + :param value: the new value, in seconds + + """ + self._context.timeout = float(value) + + @property + def blocking(self) -> bool: + """:return: whether the locking is blocking or not""" + return self._context.blocking + + @blocking.setter + def blocking(self, value: bool) -> None: + """ + Change the default blocking value. + + :param value: the new value as bool + + """ + self._context.blocking = value + + @property + def mode(self) -> int: + """:return: the file permissions for the lockfile""" + return self._context.mode + + @abstractmethod + def _acquire(self) -> None: + """If the file lock could be acquired, self._context.lock_file_fd holds the file descriptor of the lock file.""" + raise NotImplementedError + + @abstractmethod + def _release(self) -> None: + """Releases the lock and sets self._context.lock_file_fd to None.""" + raise NotImplementedError + + @property + def is_locked(self) -> bool: + """ + + :return: A boolean indicating if the lock file is holding the lock currently. + + .. versionchanged:: 2.0.0 + + This was previously a method and is now a property. + """ + return self._context.lock_file_fd is not None + + @property + def lock_counter(self) -> int: + """:return: The number of times this lock has been acquired (but not yet released).""" + return self._context.lock_counter + + def acquire( + self, + timeout: float | None = None, + poll_interval: float = 0.05, + *, + poll_intervall: float | None = None, + blocking: bool | None = None, + ) -> AcquireReturnProxy: + """ + Try to acquire the file lock. 
+ + :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default :attr:`~timeout` is and + if ``timeout < 0``, there is no timeout and this method will block until the lock could be acquired + :param poll_interval: interval of trying to acquire the lock file + :param poll_intervall: deprecated, kept for backwards compatibility, use ``poll_interval`` instead + :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the + first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired. + :raises Timeout: if fails to acquire lock within the timeout period + :return: a context object that will unlock the file when the context is exited + + .. code-block:: python + + # You can use this method in the context manager (recommended) + with lock.acquire(): + pass + + # Or use an equivalent try-finally construct: + lock.acquire() + try: + pass + finally: + lock.release() + + .. versionchanged:: 2.0.0 + + This method returns now a *proxy* object instead of *self*, + so that it can be used in a with statement without side effects. + + """ + # Use the default timeout, if no timeout is provided. + if timeout is None: + timeout = self._context.timeout + + if blocking is None: + blocking = self._context.blocking + + if poll_intervall is not None: + msg = "use poll_interval instead of poll_intervall" + warnings.warn(msg, DeprecationWarning, stacklevel=2) + poll_interval = poll_intervall + + # Increment the number right at the beginning. We can still undo it, if something fails. + self._context.lock_counter += 1 + + lock_id = id(self) + lock_filename = self.lock_file + start_time = time.perf_counter() + try: + while True: + if not self.is_locked: + _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename) + self._acquire() + if self.is_locked: + _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename) + break + if blocking is False: + _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename) + raise Timeout(lock_filename) # noqa: TRY301 + if 0 <= timeout < time.perf_counter() - start_time: + _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename) + raise Timeout(lock_filename) # noqa: TRY301 + msg = "Lock %s not acquired on %s, waiting %s seconds ..." + _LOGGER.debug(msg, lock_id, lock_filename, poll_interval) + time.sleep(poll_interval) + except BaseException: # Something did go wrong, so decrement the counter. + self._context.lock_counter = max(0, self._context.lock_counter - 1) + raise + return AcquireReturnProxy(lock=self) + + def release(self, force: bool = False) -> None: # noqa: FBT001, FBT002 + """ + Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0. + Also note, that the lock file itself is not automatically deleted. + + :param force: If true, the lock counter is ignored and the lock is released in every case/ + + """ + if self.is_locked: + self._context.lock_counter -= 1 + + if self._context.lock_counter == 0 or force: + lock_id, lock_filename = id(self), self.lock_file + + _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename) + self._release() + self._context.lock_counter = 0 + _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename) + + def __enter__(self) -> Self: + """ + Acquire the lock. 
+ + :return: the lock object + + """ + self.acquire() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + """ + Release the lock. + + :param exc_type: the exception type if raised + :param exc_value: the exception value if raised + :param traceback: the exception traceback if raised + + """ + self.release() + + def __del__(self) -> None: + """Called when the lock object is deleted.""" + self.release(force=True) + + +__all__ = [ + "AcquireReturnProxy", + "BaseFileLock", +] diff --git a/.venv/Lib/site-packages/filelock/_error.py b/.venv/Lib/site-packages/filelock/_error.py new file mode 100644 index 00000000..f7ff08c0 --- /dev/null +++ b/.venv/Lib/site-packages/filelock/_error.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from typing import Any + + +class Timeout(TimeoutError): # noqa: N818 + """Raised when the lock could not be acquired in *timeout* seconds.""" + + def __init__(self, lock_file: str) -> None: + super().__init__() + self._lock_file = lock_file + + def __reduce__(self) -> str | tuple[Any, ...]: + return self.__class__, (self._lock_file,) # Properly pickle the exception + + def __str__(self) -> str: + return f"The file lock '{self._lock_file}' could not be acquired." + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.lock_file!r})" + + @property + def lock_file(self) -> str: + """:return: The path of the file lock.""" + return self._lock_file + + +__all__ = [ + "Timeout", +] diff --git a/.venv/Lib/site-packages/filelock/_soft.py b/.venv/Lib/site-packages/filelock/_soft.py new file mode 100644 index 00000000..28c67f74 --- /dev/null +++ b/.venv/Lib/site-packages/filelock/_soft.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +import os +import sys +from contextlib import suppress +from errno import EACCES, EEXIST +from pathlib import Path + +from ._api import BaseFileLock +from ._util import ensure_directory_exists, raise_on_not_writable_file + + +class SoftFileLock(BaseFileLock): + """Simply watches the existence of the lock file.""" + + def _acquire(self) -> None: + raise_on_not_writable_file(self.lock_file) + ensure_directory_exists(self.lock_file) + # first check for exists and read-only mode as the open will mask this case as EEXIST + flags = ( + os.O_WRONLY # open for writing only + | os.O_CREAT + | os.O_EXCL # together with above raise EEXIST if the file specified by filename exists + | os.O_TRUNC # truncate the file to zero byte + ) + try: + file_handler = os.open(self.lock_file, flags, self._context.mode) + except OSError as exception: # re-raise unless expected exception + if not ( + exception.errno == EEXIST # lock already exist + or (exception.errno == EACCES and sys.platform == "win32") # has no access to this lock + ): # pragma: win32 no cover + raise + else: + self._context.lock_file_fd = file_handler + + def _release(self) -> None: + assert self._context.lock_file_fd is not None # noqa: S101 + os.close(self._context.lock_file_fd) # the lock file is definitely not None + self._context.lock_file_fd = None + with suppress(OSError): # the file is already deleted and that's what we want + Path(self.lock_file).unlink() + + +__all__ = [ + "SoftFileLock", +] diff --git a/.venv/Lib/site-packages/filelock/_unix.py b/.venv/Lib/site-packages/filelock/_unix.py new file mode 100644 index 00000000..b2fd0f33 --- /dev/null +++ b/.venv/Lib/site-packages/filelock/_unix.py @@ -0,0 +1,70 @@ +from __future__ import annotations + 
+import os +import sys +from contextlib import suppress +from errno import ENOSYS +from pathlib import Path +from typing import cast + +from ._api import BaseFileLock +from ._util import ensure_directory_exists + +#: a flag to indicate if the fcntl API is available +has_fcntl = False +if sys.platform == "win32": # pragma: win32 cover + + class UnixFileLock(BaseFileLock): + """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems.""" + + def _acquire(self) -> None: + raise NotImplementedError + + def _release(self) -> None: + raise NotImplementedError + +else: # pragma: win32 no cover + try: + import fcntl + + _ = (fcntl.flock, fcntl.LOCK_EX, fcntl.LOCK_NB, fcntl.LOCK_UN) + except (ImportError, AttributeError): + pass + else: + has_fcntl = True + + class UnixFileLock(BaseFileLock): + """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems.""" + + def _acquire(self) -> None: + ensure_directory_exists(self.lock_file) + open_flags = os.O_RDWR | os.O_TRUNC + if not Path(self.lock_file).exists(): + open_flags |= os.O_CREAT + fd = os.open(self.lock_file, open_flags, self._context.mode) + with suppress(PermissionError): # This locked is not owned by this UID + os.fchmod(fd, self._context.mode) + try: + fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) + except OSError as exception: + os.close(fd) + if exception.errno == ENOSYS: # NotImplemented error + msg = "FileSystem does not appear to support flock; use SoftFileLock instead" + raise NotImplementedError(msg) from exception + else: + self._context.lock_file_fd = fd + + def _release(self) -> None: + # Do not remove the lockfile: + # https://github.com/tox-dev/py-filelock/issues/31 + # https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition + fd = cast("int", self._context.lock_file_fd) + self._context.lock_file_fd = None + fcntl.flock(fd, fcntl.LOCK_UN) + os.close(fd) + + +__all__ = [ + "UnixFileLock", + "has_fcntl", +] diff --git a/.venv/Lib/site-packages/filelock/_util.py b/.venv/Lib/site-packages/filelock/_util.py new file mode 100644 index 00000000..c671e853 --- /dev/null +++ b/.venv/Lib/site-packages/filelock/_util.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import os +import stat +import sys +from errno import EACCES, EISDIR +from pathlib import Path + + +def raise_on_not_writable_file(filename: str) -> None: + """ + Raise an exception if attempting to open the file for writing would fail. + + This is done so files that will never be writable can be separated from files that are writable but currently + locked. + + :param filename: file to check + :raises OSError: as if the file was opened for writing. 
+ + """ + try: # use stat to do exists + can write to check without race condition + file_stat = os.stat(filename) # noqa: PTH116 + except OSError: + return # swallow does not exist or other errors + + if file_stat.st_mtime != 0: # if os.stat returns but modification is zero that's an invalid os.stat - ignore it + if not (file_stat.st_mode & stat.S_IWUSR): + raise PermissionError(EACCES, "Permission denied", filename) + + if stat.S_ISDIR(file_stat.st_mode): + if sys.platform == "win32": # pragma: win32 cover + # On Windows, this is PermissionError + raise PermissionError(EACCES, "Permission denied", filename) + else: # pragma: win32 no cover # noqa: RET506 + # On linux / macOS, this is IsADirectoryError + raise IsADirectoryError(EISDIR, "Is a directory", filename) + + +def ensure_directory_exists(filename: Path | str) -> None: + """ + Ensure the directory containing the file exists (create it if necessary). + + :param filename: file. + + """ + Path(filename).parent.mkdir(parents=True, exist_ok=True) + + +__all__ = [ + "ensure_directory_exists", + "raise_on_not_writable_file", +] diff --git a/.venv/Lib/site-packages/filelock/_windows.py b/.venv/Lib/site-packages/filelock/_windows.py new file mode 100644 index 00000000..348251d1 --- /dev/null +++ b/.venv/Lib/site-packages/filelock/_windows.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import os +import sys +from contextlib import suppress +from errno import EACCES +from pathlib import Path +from typing import cast + +from ._api import BaseFileLock +from ._util import ensure_directory_exists, raise_on_not_writable_file + +if sys.platform == "win32": # pragma: win32 cover + import msvcrt + + class WindowsFileLock(BaseFileLock): + """Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems.""" + + def _acquire(self) -> None: + raise_on_not_writable_file(self.lock_file) + ensure_directory_exists(self.lock_file) + flags = ( + os.O_RDWR # open for read and write + | os.O_CREAT # create file if not exists + | os.O_TRUNC # truncate file if not empty + ) + try: + fd = os.open(self.lock_file, flags, self._context.mode) + except OSError as exception: + if exception.errno != EACCES: # has no access to this lock + raise + else: + try: + msvcrt.locking(fd, msvcrt.LK_NBLCK, 1) + except OSError as exception: + os.close(fd) # close file first + if exception.errno != EACCES: # file is already locked + raise + else: + self._context.lock_file_fd = fd + + def _release(self) -> None: + fd = cast("int", self._context.lock_file_fd) + self._context.lock_file_fd = None + msvcrt.locking(fd, msvcrt.LK_UNLCK, 1) + os.close(fd) + + with suppress(OSError): # Probably another instance of the application hat acquired the file lock. 
+ Path(self.lock_file).unlink() + +else: # pragma: win32 no cover + + class WindowsFileLock(BaseFileLock): + """Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems.""" + + def _acquire(self) -> None: + raise NotImplementedError + + def _release(self) -> None: + raise NotImplementedError + + +__all__ = [ + "WindowsFileLock", +] diff --git a/.venv/Lib/site-packages/filelock/asyncio.py b/.venv/Lib/site-packages/filelock/asyncio.py new file mode 100644 index 00000000..1c9c9f05 --- /dev/null +++ b/.venv/Lib/site-packages/filelock/asyncio.py @@ -0,0 +1,342 @@ +"""An asyncio-based implementation of the file lock.""" + +from __future__ import annotations + +import asyncio +import contextlib +import logging +import os +import time +from dataclasses import dataclass +from threading import local +from typing import TYPE_CHECKING, Any, Callable, NoReturn, cast + +from ._api import BaseFileLock, FileLockContext, FileLockMeta +from ._error import Timeout +from ._soft import SoftFileLock +from ._unix import UnixFileLock +from ._windows import WindowsFileLock + +if TYPE_CHECKING: + import sys + from concurrent import futures + from types import TracebackType + + if sys.version_info >= (3, 11): # pragma: no cover (py311+) + from typing import Self + else: # pragma: no cover ( None: # noqa: D107 + self.lock = lock + + async def __aenter__(self) -> BaseAsyncFileLock: # noqa: D105 + return self.lock + + async def __aexit__( # noqa: D105 + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self.lock.release() + + +class AsyncFileLockMeta(FileLockMeta): + def __call__( # type: ignore[override] # noqa: PLR0913 + cls, # noqa: N805 + lock_file: str | os.PathLike[str], + timeout: float = -1, + mode: int = 0o644, + thread_local: bool = False, # noqa: FBT001, FBT002 + *, + blocking: bool = True, + is_singleton: bool = False, + loop: asyncio.AbstractEventLoop | None = None, + run_in_executor: bool = True, + executor: futures.Executor | None = None, + ) -> BaseAsyncFileLock: + if thread_local and run_in_executor: + msg = "run_in_executor is not supported when thread_local is True" + raise ValueError(msg) + instance = super().__call__( + lock_file=lock_file, + timeout=timeout, + mode=mode, + thread_local=thread_local, + blocking=blocking, + is_singleton=is_singleton, + loop=loop, + run_in_executor=run_in_executor, + executor=executor, + ) + return cast("BaseAsyncFileLock", instance) + + +class BaseAsyncFileLock(BaseFileLock, metaclass=AsyncFileLockMeta): + """Base class for asynchronous file locks.""" + + def __init__( # noqa: PLR0913 + self, + lock_file: str | os.PathLike[str], + timeout: float = -1, + mode: int = 0o644, + thread_local: bool = False, # noqa: FBT001, FBT002 + *, + blocking: bool = True, + is_singleton: bool = False, + loop: asyncio.AbstractEventLoop | None = None, + run_in_executor: bool = True, + executor: futures.Executor | None = None, + ) -> None: + """ + Create a new lock object. + + :param lock_file: path to the file + :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \ + the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it \ + to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock. + :param mode: file permissions for the lockfile + :param thread_local: Whether this object's internal context should be thread local or not. 
If this is set to \ + ``False`` then the lock will be reentrant across threads. + :param blocking: whether the lock should be blocking or not + :param is_singleton: If this is set to ``True`` then only one instance of this class will be created \ + per lock file. This is useful if you want to use the lock object for reentrant locking without needing \ + to pass the same object around. + :param loop: The event loop to use. If not specified, the running event loop will be used. + :param run_in_executor: If this is set to ``True`` then the lock will be acquired in an executor. + :param executor: The executor to use. If not specified, the default executor will be used. + + """ + self._is_thread_local = thread_local + self._is_singleton = is_singleton + + # Create the context. Note that external code should not work with the context directly and should instead use + # properties of this class. + kwargs: dict[str, Any] = { + "lock_file": os.fspath(lock_file), + "timeout": timeout, + "mode": mode, + "blocking": blocking, + "loop": loop, + "run_in_executor": run_in_executor, + "executor": executor, + } + self._context: AsyncFileLockContext = (AsyncThreadLocalFileContext if thread_local else AsyncFileLockContext)( + **kwargs + ) + + @property + def run_in_executor(self) -> bool: + """::return: whether run in executor.""" + return self._context.run_in_executor + + @property + def executor(self) -> futures.Executor | None: + """::return: the executor.""" + return self._context.executor + + @executor.setter + def executor(self, value: futures.Executor | None) -> None: # pragma: no cover + """ + Change the executor. + + :param value: the new executor or ``None`` + :type value: futures.Executor | None + + """ + self._context.executor = value + + @property + def loop(self) -> asyncio.AbstractEventLoop | None: + """::return: the event loop.""" + return self._context.loop + + async def acquire( # type: ignore[override] + self, + timeout: float | None = None, + poll_interval: float = 0.05, + *, + blocking: bool | None = None, + ) -> AsyncAcquireReturnProxy: + """ + Try to acquire the file lock. + + :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default + :attr:`~BaseFileLock.timeout` is and if ``timeout < 0``, there is no timeout and + this method will block until the lock could be acquired + :param poll_interval: interval of trying to acquire the lock file + :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the + first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired. + :raises Timeout: if fails to acquire lock within the timeout period + :return: a context object that will unlock the file when the context is exited + + .. code-block:: python + + # You can use this method in the context manager (recommended) + with lock.acquire(): + pass + + # Or use an equivalent try-finally construct: + lock.acquire() + try: + pass + finally: + lock.release() + + """ + # Use the default timeout, if no timeout is provided. + if timeout is None: + timeout = self._context.timeout + + if blocking is None: + blocking = self._context.blocking + + # Increment the number right at the beginning. We can still undo it, if something fails. 
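+        # The counter, not the OS-level lock, is what makes this lock reentrant:
+        # if this instance already holds the file lock, the increment alone
+        # completes this acquire() call, and the ``except BaseException`` handler
+        # below undoes the increment if acquisition times out or fails.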
+ self._context.lock_counter += 1 + + lock_id = id(self) + lock_filename = self.lock_file + start_time = time.perf_counter() + try: + while True: + if not self.is_locked: + _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename) + await self._run_internal_method(self._acquire) + if self.is_locked: + _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename) + break + if blocking is False: + _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename) + raise Timeout(lock_filename) # noqa: TRY301 + if 0 <= timeout < time.perf_counter() - start_time: + _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename) + raise Timeout(lock_filename) # noqa: TRY301 + msg = "Lock %s not acquired on %s, waiting %s seconds ..." + _LOGGER.debug(msg, lock_id, lock_filename, poll_interval) + await asyncio.sleep(poll_interval) + except BaseException: # Something did go wrong, so decrement the counter. + self._context.lock_counter = max(0, self._context.lock_counter - 1) + raise + return AsyncAcquireReturnProxy(lock=self) + + async def release(self, force: bool = False) -> None: # type: ignore[override] # noqa: FBT001, FBT002 + """ + Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0. + Also note, that the lock file itself is not automatically deleted. + + :param force: If true, the lock counter is ignored and the lock is released in every case/ + + """ + if self.is_locked: + self._context.lock_counter -= 1 + + if self._context.lock_counter == 0 or force: + lock_id, lock_filename = id(self), self.lock_file + + _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename) + await self._run_internal_method(self._release) + self._context.lock_counter = 0 + _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename) + + async def _run_internal_method(self, method: Callable[[], Any]) -> None: + if asyncio.iscoroutinefunction(method): + await method() + elif self.run_in_executor: + loop = self.loop or asyncio.get_running_loop() + await loop.run_in_executor(self.executor, method) + else: + method() + + def __enter__(self) -> NoReturn: + """ + Replace old __enter__ method to avoid using it. + + NOTE: DO NOT USE `with` FOR ASYNCIO LOCKS, USE `async with` INSTEAD. + + :return: none + :rtype: NoReturn + """ + msg = "Do not use `with` for asyncio locks, use `async with` instead." + raise NotImplementedError(msg) + + async def __aenter__(self) -> Self: + """ + Acquire the lock. + + :return: the lock object + + """ + await self.acquire() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + """ + Release the lock. 
+ + :param exc_type: the exception type if raised + :param exc_value: the exception value if raised + :param traceback: the exception traceback if raised + + """ + await self.release() + + def __del__(self) -> None: + """Called when the lock object is deleted.""" + with contextlib.suppress(RuntimeError): + loop = self.loop or asyncio.get_running_loop() + if not loop.is_running(): # pragma: no cover + loop.run_until_complete(self.release(force=True)) + else: + loop.create_task(self.release(force=True)) + + +class AsyncSoftFileLock(SoftFileLock, BaseAsyncFileLock): + """Simply watches the existence of the lock file.""" + + +class AsyncUnixFileLock(UnixFileLock, BaseAsyncFileLock): + """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems.""" + + +class AsyncWindowsFileLock(WindowsFileLock, BaseAsyncFileLock): + """Uses the :func:`msvcrt.locking` to hard lock the lock file on windows systems.""" + + +__all__ = [ + "AsyncAcquireReturnProxy", + "AsyncSoftFileLock", + "AsyncUnixFileLock", + "AsyncWindowsFileLock", + "BaseAsyncFileLock", +] diff --git a/.venv/Lib/site-packages/filelock/py.typed b/.venv/Lib/site-packages/filelock/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/filelock/version.py b/.venv/Lib/site-packages/filelock/version.py new file mode 100644 index 00000000..68cfbf97 --- /dev/null +++ b/.venv/Lib/site-packages/filelock/version.py @@ -0,0 +1,21 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '3.18.0' +__version_tuple__ = version_tuple = (3, 18, 0) diff --git a/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/INSTALLER b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/LICENSE.txt b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/LICENSE.txt new file mode 100644 index 00000000..19ca0b3c --- /dev/null +++ b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013-2024 Sebastian Bank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/METADATA b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/METADATA new file mode 100644 index 00000000..28c0f15c --- /dev/null +++ b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/METADATA @@ -0,0 +1,273 @@ +Metadata-Version: 2.1 +Name: graphviz +Version: 0.20.3 +Summary: Simple Python interface for Graphviz +Home-page: https://github.com/xflr6/graphviz +Author: Sebastian Bank +Author-email: sebastian.bank@uni-leipzig.de +License: MIT +Project-URL: Documentation, https://graphviz.readthedocs.io +Project-URL: Changelog, https://graphviz.readthedocs.io/en/latest/changelog.html +Project-URL: Issue Tracker, https://github.com/xflr6/graphviz/issues +Project-URL: CI, https://github.com/xflr6/graphviz/actions +Project-URL: Coverage, https://codecov.io/gh/xflr6/graphviz +Keywords: graph visualization dot render +Platform: any +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Scientific/Engineering :: Visualization +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Provides-Extra: dev +Requires-Dist: tox >=3 ; extra == 'dev' +Requires-Dist: flake8 ; extra == 'dev' +Requires-Dist: pep8-naming ; extra == 'dev' +Requires-Dist: wheel ; extra == 'dev' +Requires-Dist: twine ; extra == 'dev' +Provides-Extra: docs +Requires-Dist: sphinx <7,>=5 ; extra == 'docs' +Requires-Dist: sphinx-autodoc-typehints ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme ; extra == 'docs' +Provides-Extra: test +Requires-Dist: pytest <8.1,>=7 ; extra == 'test' +Requires-Dist: pytest-mock >=3 ; extra == 'test' +Requires-Dist: pytest-cov ; extra == 'test' +Requires-Dist: coverage ; extra == 'test' + +Graphviz +======== + +|PyPI version| |License| |Supported Python| |Wheel| |Downloads| + +|Build| |Codecov| |Readthedocs-stable| |Readthedocs-latest| + +|Binder-stable| + +This package facilitates the creation and rendering of graph descriptions in +the DOT_ language of the Graphviz_ graph drawing software (`upstream repo`_) +from Python. + +Create a graph object, assemble the graph by adding nodes and edges, and +retrieve its DOT source code string. Save the source code to a file and render +it with the Graphviz installation of your system. + +Use the ``view`` option/method to directly inspect the resulting (PDF, PNG, +SVG, etc.) file with its default application. Graphs can also be rendered +and displayed within `Jupyter notebooks`_ (formerly known as +`IPython notebooks`_, +`example `_, `nbviewer `_) +as well as the `Jupyter QtConsole`_. 
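+A minimal sketch of this notebook workflow (it assumes the Graphviz ``dot``
+binary is installed and that the last expression is evaluated in a Jupyter
+cell, where it is displayed inline via ``_repr_mimebundle_()``; the inline
+display format can be changed with ``graphviz.set_jupyter_format()``):
+
+.. code:: python
+
+    >>> doctest_mark_exe()  # skip this line
+
+    >>> import graphviz
+    >>> g = graphviz.Digraph('demo')
+    >>> g.edge('spam', 'eggs')
+    >>> g  # as the last expression in a Jupyter cell, rendered inline  # doctest: +SKIP
+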
+ + +Links +----- + +- GitHub: https://github.com/xflr6/graphviz +- PyPI: https://pypi.org/project/graphviz/ +- Documentation: https://graphviz.readthedocs.io +- Changelog: https://graphviz.readthedocs.io/en/latest/changelog.html +- Issue Tracker: https://github.com/xflr6/graphviz/issues +- Download: https://pypi.org/project/graphviz/#files + + +Installation +------------ + +This package runs under Python 3.8+, use pip_ to install: + +.. code:: bash + + $ pip install graphviz + +To render the generated DOT source code, you also need to install Graphviz_ +(`download page `_, +`archived versions `_, +`installation procedure for Windows `_). + +Make sure that the directory containing the ``dot`` executable is on your +systems' ``PATH`` +(sometimes done by the installer; +setting ``PATH`` +on `Linux `_, +`Mac `_, +and `Windows `_). + +Anaconda_: see the conda-forge_ package +`conda-forge/python-graphviz `_ +(`feedstock `_), +which should automatically ``conda install`` +`conda-forge/graphviz `_ +(`feedstock `_) as dependency. + + +Quickstart +---------- + +Create a graph object: + +.. code:: python + + >>> import graphviz # doctest: +NO_EXE + >>> dot = graphviz.Digraph(comment='The Round Table') + >>> dot #doctest: +ELLIPSIS + + +Add nodes and edges: + +.. code:: python + + >>> dot.node('A', 'King Arthur') # doctest: +NO_EXE + >>> dot.node('B', 'Sir Bedevere the Wise') + >>> dot.node('L', 'Sir Lancelot the Brave') + + >>> dot.edges(['AB', 'AL']) + >>> dot.edge('B', 'L', constraint='false') + +Check the generated source code: + +.. code:: python + + >>> print(dot.source) # doctest: +NORMALIZE_WHITESPACE +NO_EXE + // The Round Table + digraph { + A [label="King Arthur"] + B [label="Sir Bedevere the Wise"] + L [label="Sir Lancelot the Brave"] + A -> B + A -> L + B -> L [constraint=false] + } + +Save and render the source code (skip/ignore any ``doctest_mark_exe()`` lines): + +.. code:: python + + >>> doctest_mark_exe() # skip this line + + >>> dot.render('doctest-output/round-table.gv').replace('\\', '/') + 'doctest-output/round-table.gv.pdf' + +Save and render and view the result: + +.. code:: python + + >>> doctest_mark_exe() # skip this line + + >>> dot.render('doctest-output/round-table.gv', view=True) # doctest: +SKIP + 'doctest-output/round-table.gv.pdf' + +.. image:: https://raw.github.com/xflr6/graphviz/master/docs/_static/round-table.svg + :align: center + :alt: round-table.svg + +**Caveat:** +Backslash-escapes and strings of the form ``<...>`` +have a special meaning in the DOT language. +If you need to render arbitrary strings (e.g. from user input), +check the details in the `user guide`_. + + +See also +-------- + +- pygraphviz_ |--| full-blown interface wrapping the Graphviz C library with SWIG +- graphviz-python_ |--| official Python bindings + (`documentation `_) +- pydot_ |--| stable pure-Python approach, requires pyparsing + + +License +------- + +This package is distributed under the `MIT license`_. + + +Development +----------- + +- Development documentation: https://graphviz.readthedocs.io/en/latest/development.html +- Release process: https://graphviz.readthedocs.io/en/latest/release_process.html + + +.. _Graphviz: https://www.graphviz.org +.. _DOT: https://www.graphviz.org/doc/info/lang.html +.. _upstream repo: https://gitlab.com/graphviz/graphviz/ +.. _upstream-download: https://www.graphviz.org/download/ +.. _upstream-archived: https://www2.graphviz.org/Archive/stable/ +.. 
_upstream-windows: https://forum.graphviz.org/t/new-simplified-installation-procedure-on-windows/224 + +.. _set-path-windows: https://www.computerhope.com/issues/ch000549.htm +.. _set-path-linux: https://stackoverflow.com/questions/14637979/how-to-permanently-set-path-on-linux-unix +.. _set-path-darwin: https://stackoverflow.com/questions/22465332/setting-path-environment-variable-in-osx-permanently + +.. _pip: https://pip.pypa.io + +.. _Jupyter notebooks: https://jupyter.org +.. _IPython notebooks: https://ipython.org/notebook.html +.. _Jupyter QtConsole: https://qtconsole.readthedocs.io + +.. _notebook: https://github.com/xflr6/graphviz/blob/master/examples/graphviz-notebook.ipynb +.. _notebook-nbviewer: https://nbviewer.org/github/xflr6/graphviz/blob/master/examples/graphviz-notebook.ipynb + +.. _Anaconda: https://docs.anaconda.com/anaconda/install/ +.. _conda-forge: https://conda-forge.org +.. _conda-forge-python-graphviz: https://anaconda.org/conda-forge/python-graphviz +.. _conda-forge-python-graphviz-feedstock: https://github.com/conda-forge/python-graphviz-feedstock +.. _conda-forge-graphviz: https://anaconda.org/conda-forge/graphviz +.. _conda-forge-graphviz-feedstock: https://github.com/conda-forge/graphviz-feedstock + +.. _user guide: https://graphviz.readthedocs.io/en/stable/manual.html + +.. _pygraphviz: https://pypi.org/project/pygraphviz/ +.. _graphviz-python: https://pypi.org/project/graphviz-python/ +.. _graphviz-python-docs: https://www.graphviz.org/pdf/gv.3python.pdf +.. _pydot: https://pypi.org/project/pydot/ + +.. _MIT license: https://opensource.org/licenses/MIT + + +.. |--| unicode:: U+2013 + + +.. |PyPI version| image:: https://img.shields.io/pypi/v/graphviz.svg + :target: https://pypi.org/project/graphviz/ + :alt: Latest PyPI Version +.. |License| image:: https://img.shields.io/pypi/l/graphviz.svg + :target: https://github.com/xflr6/graphviz/blob/master/LICENSE.txt + :alt: License +.. |Supported Python| image:: https://img.shields.io/pypi/pyversions/graphviz.svg + :target: https://pypi.org/project/graphviz/ + :alt: Supported Python Versions +.. |Wheel| image:: https://img.shields.io/pypi/wheel/graphviz.svg + :target: https://pypi.org/project/graphviz/#files + :alt: Wheel format +.. |Downloads| image:: https://img.shields.io/pypi/dm/graphviz.svg + :target: https://pypistats.org/packages/graphviz + :alt: Monthly downloads + +.. |Build| image:: https://github.com/xflr6/graphviz/actions/workflows/build.yaml/badge.svg?branch=master + :target: https://github.com/xflr6/graphviz/actions/workflows/build.yaml?query=branch%3Amaster + :alt: Build +.. |Codecov| image:: https://codecov.io/gh/xflr6/graphviz/branch/master/graph/badge.svg + :target: https://codecov.io/gh/xflr6/graphviz + :alt: Codecov +.. |Readthedocs-stable| image:: https://readthedocs.org/projects/graphviz/badge/?version=stable + :target: https://graphviz.readthedocs.io/en/stable/ + :alt: Readthedocs (stable) +.. |Readthedocs-latest| image:: https://readthedocs.org/projects/graphviz/badge/?version=latest + :target: https://graphviz.readthedocs.io/en/latest/ + :alt: Readthedocs (latest) + +.. 
|Binder-stable| image:: https://img.shields.io/badge/launch-binder%20(stable)-579ACA.svg?logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAFkAAABZCAMAAABi1XidAAAB8lBMVEX///9XmsrmZYH1olJXmsr1olJXmsrmZYH1olJXmsr1olJXmsrmZYH1olL1olJXmsr1olJXmsrmZYH1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olJXmsrmZYH1olL1olL0nFf1olJXmsrmZYH1olJXmsq8dZb1olJXmsrmZYH1olJXmspXmspXmsr1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olLeaIVXmsrmZYH1olL1olL1olJXmsrmZYH1olLna31Xmsr1olJXmsr1olJXmsrmZYH1olLqoVr1olJXmsr1olJXmsrmZYH1olL1olKkfaPobXvviGabgadXmsqThKuofKHmZ4Dobnr1olJXmsr1olJXmspXmsr1olJXmsrfZ4TuhWn1olL1olJXmsqBi7X1olJXmspZmslbmMhbmsdemsVfl8ZgmsNim8Jpk8F0m7R4m7F5nLB6jbh7jbiDirOEibOGnKaMhq+PnaCVg6qWg6qegKaff6WhnpKofKGtnomxeZy3noG6dZi+n3vCcpPDcpPGn3bLb4/Mb47UbIrVa4rYoGjdaIbeaIXhoWHmZYHobXvpcHjqdHXreHLroVrsfG/uhGnuh2bwj2Hxk17yl1vzmljzm1j0nlX1olL3AJXWAAAAbXRSTlMAEBAQHx8gICAuLjAwMDw9PUBAQEpQUFBXV1hgYGBkcHBwcXl8gICAgoiIkJCQlJicnJ2goKCmqK+wsLC4usDAwMjP0NDQ1NbW3Nzg4ODi5+3v8PDw8/T09PX29vb39/f5+fr7+/z8/Pz9/v7+zczCxgAABC5JREFUeAHN1ul3k0UUBvCb1CTVpmpaitAGSLSpSuKCLWpbTKNJFGlcSMAFF63iUmRccNG6gLbuxkXU66JAUef/9LSpmXnyLr3T5AO/rzl5zj137p136BISy44fKJXuGN/d19PUfYeO67Znqtf2KH33Id1psXoFdW30sPZ1sMvs2D060AHqws4FHeJojLZqnw53cmfvg+XR8mC0OEjuxrXEkX5ydeVJLVIlV0e10PXk5k7dYeHu7Cj1j+49uKg7uLU61tGLw1lq27ugQYlclHC4bgv7VQ+TAyj5Zc/UjsPvs1sd5cWryWObtvWT2EPa4rtnWW3JkpjggEpbOsPr7F7EyNewtpBIslA7p43HCsnwooXTEc3UmPmCNn5lrqTJxy6nRmcavGZVt/3Da2pD5NHvsOHJCrdc1G2r3DITpU7yic7w/7Rxnjc0kt5GC4djiv2Sz3Fb2iEZg41/ddsFDoyuYrIkmFehz0HR2thPgQqMyQYb2OtB0WxsZ3BeG3+wpRb1vzl2UYBog8FfGhttFKjtAclnZYrRo9ryG9uG/FZQU4AEg8ZE9LjGMzTmqKXPLnlWVnIlQQTvxJf8ip7VgjZjyVPrjw1te5otM7RmP7xm+sK2Gv9I8Gi++BRbEkR9EBw8zRUcKxwp73xkaLiqQb+kGduJTNHG72zcW9LoJgqQxpP3/Tj//c3yB0tqzaml05/+orHLksVO+95kX7/7qgJvnjlrfr2Ggsyx0eoy9uPzN5SPd86aXggOsEKW2Prz7du3VID3/tzs/sSRs2w7ovVHKtjrX2pd7ZMlTxAYfBAL9jiDwfLkq55Tm7ifhMlTGPyCAs7RFRhn47JnlcB9RM5T97ASuZXIcVNuUDIndpDbdsfrqsOppeXl5Y+XVKdjFCTh+zGaVuj0d9zy05PPK3QzBamxdwtTCrzyg/2Rvf2EstUjordGwa/kx9mSJLr8mLLtCW8HHGJc2R5hS219IiF6PnTusOqcMl57gm0Z8kanKMAQg0qSyuZfn7zItsbGyO9QlnxY0eCuD1XL2ys/MsrQhltE7Ug0uFOzufJFE2PxBo/YAx8XPPdDwWN0MrDRYIZF0mSMKCNHgaIVFoBbNoLJ7tEQDKxGF0kcLQimojCZopv0OkNOyWCCg9XMVAi7ARJzQdM2QUh0gmBozjc3Skg6dSBRqDGYSUOu66Zg+I2fNZs/M3/f/Grl/XnyF1Gw3VKCez0PN5IUfFLqvgUN4C0qNqYs5YhPL+aVZYDE4IpUk57oSFnJm4FyCqqOE0jhY2SMyLFoo56zyo6becOS5UVDdj7Vih0zp+tcMhwRpBeLyqtIjlJKAIZSbI8SGSF3k0pA3mR5tHuwPFoa7N7reoq2bqCsAk1HqCu5uvI1n6JuRXI+S1Mco54YmYTwcn6Aeic+kssXi8XpXC4V3t7/ADuTNKaQJdScAAAAAElFTkSuQmCC + :target: https://mybinder.org/v2/gh/xflr6/graphviz/stable + :alt: Binder (stable) diff --git a/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/RECORD b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/RECORD new file mode 100644 index 00000000..aecd7ae7 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/RECORD @@ -0,0 +1,72 @@ +graphviz-0.20.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +graphviz-0.20.3.dist-info/LICENSE.txt,sha256=75XjRK9OrJmawwWuAZCHYHq9KhBZc1tQpcxttWuwPBs,1107 +graphviz-0.20.3.dist-info/METADATA,sha256=JXy_vx9EgT4fWRmI2btJz6Qz_LX5rm0RT8FNb3FNSPA,12428 +graphviz-0.20.3.dist-info/RECORD,, +graphviz-0.20.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +graphviz-0.20.3.dist-info/top_level.txt,sha256=Ee90R0icUEXS0AvIJbqVuNvagnakuhP5Wips0LDkQuw,9 +graphviz/__init__.py,sha256=d0oeZLnATYk63rJuwQcdWh_9-zvvMYwm57WzSU4ZZ2Q,3504 +graphviz/__pycache__/__init__.cpython-312.pyc,, +graphviz/__pycache__/_compat.cpython-312.pyc,, +graphviz/__pycache__/_defaults.cpython-312.pyc,, 
+graphviz/__pycache__/_tools.cpython-312.pyc,, +graphviz/__pycache__/base.cpython-312.pyc,, +graphviz/__pycache__/copying.cpython-312.pyc,, +graphviz/__pycache__/dot.cpython-312.pyc,, +graphviz/__pycache__/encoding.cpython-312.pyc,, +graphviz/__pycache__/exceptions.cpython-312.pyc,, +graphviz/__pycache__/graphs.cpython-312.pyc,, +graphviz/__pycache__/jupyter_integration.cpython-312.pyc,, +graphviz/__pycache__/piping.cpython-312.pyc,, +graphviz/__pycache__/quoting.cpython-312.pyc,, +graphviz/__pycache__/rendering.cpython-312.pyc,, +graphviz/__pycache__/saving.cpython-312.pyc,, +graphviz/__pycache__/sources.cpython-312.pyc,, +graphviz/__pycache__/unflattening.cpython-312.pyc,, +graphviz/_compat.py,sha256=fuTISzdlIDZjcJuA9Yw5LBKsgVKd2Hq5gvCfYDZwwW0,1127 +graphviz/_defaults.py,sha256=B1YQTDzyep8Aw_xNfeW69XnWfHuMfAmngs7n9KcltBM,2255 +graphviz/_tools.py,sha256=-I7hBcwjSv8qGtGRnFEooze8Jl1qGy5OEt5RFbuqG0s,6069 +graphviz/backend/__init__.py,sha256=i9582Ayyo46FN-7qMG4gaC9_wXJa8lpB27cbf1UMdZE,778 +graphviz/backend/__pycache__/__init__.cpython-312.pyc,, +graphviz/backend/__pycache__/dot_command.cpython-312.pyc,, +graphviz/backend/__pycache__/execute.cpython-312.pyc,, +graphviz/backend/__pycache__/mixins.cpython-312.pyc,, +graphviz/backend/__pycache__/piping.cpython-312.pyc,, +graphviz/backend/__pycache__/rendering.cpython-312.pyc,, +graphviz/backend/__pycache__/unflattening.cpython-312.pyc,, +graphviz/backend/__pycache__/upstream_version.cpython-312.pyc,, +graphviz/backend/__pycache__/viewing.cpython-312.pyc,, +graphviz/backend/dot_command.py,sha256=xCcGNEmA48fyMcLEbbyq-CSo14j4H2_vZ7RayWon4zU,1473 +graphviz/backend/execute.py,sha256=aOpkVJGZbWdu8RDPylGc-j5Umv4OVPJyRH15cGgzke0,4458 +graphviz/backend/mixins.py,sha256=87fGYqR40hFQCp7c-1lZa-Ab-OzLFVR6W9XpjFNObjA,2319 +graphviz/backend/piping.py,sha256=-1LfE4wLQuZE25uMiCwP11YCy3C5eIrw8pGKUs93sVY,8982 +graphviz/backend/rendering.py,sha256=sH5tqRziduMNI7xawhoiSHCaR8u5SndYN1t1ZgsWCbA,13409 +graphviz/backend/unflattening.py,sha256=hpPSkK3lbsZYBXCX80kcAiBxIfN-3QoWT5w5eQBcXYQ,2140 +graphviz/backend/upstream_version.py,sha256=_f9_LslTbyOj6qdHcZMeJ-7h5YvYABXHVzUIhfvgUzg,2015 +graphviz/backend/viewing.py,sha256=tit9D2IsRMdRd0zsh3UMn-1lbRK_G1-jEnlBoFlrZvE,2255 +graphviz/base.py,sha256=8ec9cCmF5qlFcOL8PLiB-XYSx8uyDlL3L_Sl_pXi_hQ,965 +graphviz/copying.py,sha256=TVz3GUC4-z_T0bEUFUNwzUDKWBnlcjpkbr3Gp4xlLmU,565 +graphviz/dot.py,sha256=M4SrcuQx2KFWssxjtaC5y5FZGSrMN04km0y_Ulf6m34,13047 +graphviz/encoding.py,sha256=LVp7W5rAI-8bgaX_QRci7NfoUJ5PXNKXJbuPLIasQfo,1107 +graphviz/exceptions.py,sha256=XinkoZh9NDTwVYT4-2qp-p6ZKlmVkYChokbM-WLkH3g,1073 +graphviz/graphs.py,sha256=ntqazhm1MD_43tmijzv44Sf7lZPnCmbC1CmunV598lU,4425 +graphviz/jupyter_integration.py,sha256=5aTzWnWNF-C40cq2ohkJ84f-putNyDCkZ6D8aGvsqsM,4451 +graphviz/parameters/__init__.py,sha256=xQE5N2FDaTML0MmZP1J8ETD4wDMSTicgwflL05Q4YIM,482 +graphviz/parameters/__pycache__/__init__.cpython-312.pyc,, +graphviz/parameters/__pycache__/base.cpython-312.pyc,, +graphviz/parameters/__pycache__/engines.cpython-312.pyc,, +graphviz/parameters/__pycache__/formats.cpython-312.pyc,, +graphviz/parameters/__pycache__/formatters.cpython-312.pyc,, +graphviz/parameters/__pycache__/mixins.cpython-312.pyc,, +graphviz/parameters/__pycache__/renderers.cpython-312.pyc,, +graphviz/parameters/base.py,sha256=K-t4_oYberxA8ps2KM2Ccrofkl_AsFP26T1R-VBBGjY,465 +graphviz/parameters/engines.py,sha256=rciuGFFO7SjIw9YkYzerhH-j5znF0sdLg0SXWHCf8cM,1716 +graphviz/parameters/formats.py,sha256=yDeNrOhLzXl2gKeBLTee5qmFDrtoodI7zr96Q5P_b2k,2433 
+graphviz/parameters/formatters.py,sha256=BYhS7xPOOyaPD7fP9ZenfbfjC3zTN15M-H6hqTvfxFs,1867 +graphviz/parameters/mixins.py,sha256=BDv50E7rsIqxwj01qSA6GJZyCObu6miCh0w69q3q-ao,1446 +graphviz/parameters/renderers.py,sha256=7m8pHTXNp_6ZVwmgOJzqqCiOrg1GHs29nluaC9sEA0U,2001 +graphviz/piping.py,sha256=s922g1uDOHxXj0HJFXX3NU1B6xP8-E4QpYBgCUdZzFM,7137 +graphviz/quoting.py,sha256=7dd1aMLNP62cuC2vVpaHMb648UII3bP93lvDUcq2cFs,6564 +graphviz/rendering.py,sha256=w7QJ0iU_z3965Zz7fEp7ttB1IDyL71fT7bCCJ7bNh2I,8198 +graphviz/saving.py,sha256=uSkc69DEG6FnPZLuej8x4uoT1nIcnSLZTDdhAbf2yDY,2738 +graphviz/sources.py,sha256=R_hCDVyrP18yQpfuOAj2cDiBiRYAUfrw2HXQrm1EXdY,6181 +graphviz/unflattening.py,sha256=156nuXv3EKYF1gP2EYEhyJ2MAVveebUtiyOXFhhe0EA,2498 diff --git a/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/WHEEL b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/WHEEL new file mode 100644 index 00000000..bab98d67 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/top_level.txt b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/top_level.txt new file mode 100644 index 00000000..4d956093 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz-0.20.3.dist-info/top_level.txt @@ -0,0 +1 @@ +graphviz diff --git a/.venv/Lib/site-packages/graphviz/__init__.py b/.venv/Lib/site-packages/graphviz/__init__.py new file mode 100644 index 00000000..762e88b0 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/__init__.py @@ -0,0 +1,109 @@ +# graphviz - create dot, save, render, view + +"""Assemble DOT source code and render it with Graphviz. + +Example: + >>> import graphviz # doctest: +NO_EXE + >>> dot = graphviz.Digraph(comment='The Round Table') + + >>> dot.node('A', 'King Arthur') + >>> dot.node('B', 'Sir Bedevere the Wise') + >>> dot.node('L', 'Sir Lancelot the Brave') + + >>> dot.edges(['AB', 'AL']) + + >>> dot.edge('B', 'L', constraint='false') + + >>> print(dot) #doctest: +NORMALIZE_WHITESPACE + // The Round Table + digraph { + A [label="King Arthur"] + B [label="Sir Bedevere the Wise"] + L [label="Sir Lancelot the Brave"] + A -> B + A -> L + B -> L [constraint=false] + } +""" + +from ._defaults import set_default_engine, set_default_format, set_jupyter_format + +from .backend import (DOT_BINARY, UNFLATTEN_BINARY, + render, pipe, pipe_string, pipe_lines, pipe_lines_string, + unflatten, version, view) +from .exceptions import (ExecutableNotFound, CalledProcessError, + RequiredArgumentError, FileExistsError, + UnknownSuffixWarning, FormatSuffixMismatchWarning, + DotSyntaxWarning) +from .graphs import Graph, Digraph +from .jupyter_integration import SUPPORTED_JUPYTER_FORMATS +from .parameters import ENGINES, FORMATS, RENDERERS, FORMATTERS +from .quoting import escape, nohtml +from .sources import Source + +__all__ = ['ENGINES', 'FORMATS', 'RENDERERS', 'FORMATTERS', + 'DOT_BINARY', 'UNFLATTEN_BINARY', + 'SUPPORTED_JUPYTER_FORMATS', + 'Graph', 'Digraph', + 'Source', + 'escape', 'nohtml', + 'render', 'pipe', 'pipe_string', 'pipe_lines', 'pipe_lines_string', + 'unflatten', 'version', 'view', + 'ExecutableNotFound', 'CalledProcessError', + 'RequiredArgumentError', 'FileExistsError', + 'UnknownSuffixWarning', 'FormatSuffixMismatchWarning', + 'DotSyntaxWarning', + 'set_default_engine', 'set_default_format', 'set_jupyter_format'] + +__title__ = 'graphviz' +__version__ = '0.20.3' +__author__ = 'Sebastian Bank ' +__license__ = 'MIT, 
see LICENSE.txt' +__copyright__ = 'Copyright (c) 2013-2024 Sebastian Bank' + +ENGINES = ENGINES +""":class:`set` of known layout commands used for rendering +(``'dot'``, ``'neato'``, ...).""" + +FORMATS = FORMATS +""":class:`set` of known output formats for rendering +(``'pdf'``, ``'png'``, ...).""" + +RENDERERS = RENDERERS +""":class:`set` of known output renderers for rendering +(``'cairo'``, ``'gd'``, ...).""" + +FORMATTERS = FORMATTERS +""":class:`set` of known output formatters for rendering +(``'cairo'``, ``'gd'``, ...).""" + +SUPPORTED_JUPYTER_FORMATS = SUPPORTED_JUPYTER_FORMATS +""":class:`set` of supported formats for ``_repr_mimebundle_()`` +(``'svg'``, ``'png'``, ...).""" + +DOT_BINARY = DOT_BINARY +""":class:`pathlib.Path` of rendering command (``Path('dot')``).""" + +UNFLATTEN_BINARY = UNFLATTEN_BINARY +""":class:`pathlib.Path` of unflatten command (``Path('unflatten')``).""" + + +ExecutableNotFound = ExecutableNotFound + + +CalledProcessError = CalledProcessError + + +RequiredArgumentError = RequiredArgumentError + + +FileExistsError = FileExistsError + + +UnknownSuffixWarning = UnknownSuffixWarning + + +FormatSuffixMismatchWarning = FormatSuffixMismatchWarning + + +DotSyntaxWarning = DotSyntaxWarning diff --git a/.venv/Lib/site-packages/graphviz/_compat.py b/.venv/Lib/site-packages/graphviz/_compat.py new file mode 100644 index 00000000..332ff64e --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/_compat.py @@ -0,0 +1,33 @@ +"""Python 3.8 compatibility and platform compatibility.""" + +import platform +import sys + +if sys.version_info < (3, 9): # pragma: no cover + # pytype not supported + import unittest.mock + + Literal = unittest.mock.MagicMock(name='Literal') +else: # pragma: no cover + from typing import Literal + + Literal = Literal # CAVEAT: use None instead of Literal[None] + + +def get_startupinfo() -> None: + """Return None for startupinfo argument of ``subprocess.Popen``.""" + return None + + +assert get_startupinfo() is None, 'get_startupinfo() defaults to a no-op' + + +if platform.system() == 'Windows': # pragma: no cover + import subprocess + + def get_startupinfo() -> subprocess.STARTUPINFO: # pytype: disable=module-attr + """Return subprocess.STARTUPINFO instance hiding the console window.""" + startupinfo = subprocess.STARTUPINFO() # pytype: disable=module-attr + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW # pytype: disable=module-attr + startupinfo.wShowWindow = subprocess.SW_HIDE # pytype: disable=module-attr + return startupinfo diff --git a/.venv/Lib/site-packages/graphviz/_defaults.py b/.venv/Lib/site-packages/graphviz/_defaults.py new file mode 100644 index 00000000..5f0144d2 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/_defaults.py @@ -0,0 +1,70 @@ +"""Set package-wide default parameters and IPython/Jupyter display format.""" + +__all_ = ['DEFAULT_SOURCE_EXTENSION', + 'set_default_engine', 'set_default_format', 'set_jupyter_format'] + +DEFAULT_SOURCE_EXTENSION = 'gv' + + +def set_default_engine(engine: str) -> str: + """Change the default ``engine`` and return the old default value. + + Args: + engine: new default ``engine`` + used by all present and newly created instances + without explicitly set ``engine`` + (``'dot'``, ``'neato'``, ...). + + Returns: + The old default value used for ``engine``. + """ + from . 
import parameters + + parameters.verify_engine(engine) + + old_default_engine = parameters.Parameters._engine + parameters.Parameters._engine = engine + return old_default_engine + + +def set_default_format(format: str) -> str: + """Change the default ``format`` and return the old default value. + + Args: + format: new default ``format`` + used by all present and newly created instances + without explicitly set ``format`` + (``'pdf'``, ``'png'``, ...). + + Returns: + The old default value used for ``format``. + """ + from . import parameters + + parameters.verify_format(format) + + old_default_format = parameters.Parameters._format + parameters.Parameters._format = format + return old_default_format + + +def set_jupyter_format(jupyter_format: str) -> str: + """Change the default mimetype format for ``_repr_mimebundle_()`` and return the old value. + + Args: + jupyter_format: new default IPython/Jupyter display format + used by all present and newly created instances + (``'svg'``, ``'png'``, ...). + + Returns: + The old default value used for IPython/Jupyter display format. + """ + from . import jupyter_integration + + mimetype = jupyter_integration.get_jupyter_format_mimetype(jupyter_format) + + old_mimetype = jupyter_integration.JupyterIntegration._jupyter_mimetype + old_format = jupyter_integration.get_jupyter_mimetype_format(old_mimetype) + + jupyter_integration.JupyterIntegration._jupyter_mimetype = mimetype + return old_format diff --git a/.venv/Lib/site-packages/graphviz/_tools.py b/.venv/Lib/site-packages/graphviz/_tools.py new file mode 100644 index 00000000..018425c9 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/_tools.py @@ -0,0 +1,175 @@ +"""Generic re-useable self-contained helper functions.""" + +import functools +import inspect +import itertools +import logging +import os +import pathlib +import typing +import warnings + +__all__ = ['attach', + 'mkdirs', + 'mapping_items', + 'promote_pathlike', + 'promote_pathlike_directory', + 'deprecate_positional_args'] + + +log = logging.getLogger(__name__) + + +def attach(object: typing.Any, /, name: str) -> typing.Callable: + """Return a decorator doing ``setattr(object, name)`` with its argument. + + >>> spam = type('Spam', (object,), {})() # doctest: +NO_EXE + + >>> @attach(spam, 'eggs') + ... def func(): + ... pass + + >>> spam.eggs # doctest: +ELLIPSIS + + """ + def decorator(func): + setattr(object, name, func) + return func + + return decorator + + +def mkdirs(filename: typing.Union[os.PathLike, str], /, *, mode: int = 0o777) -> None: + """Recursively create directories up to the path of ``filename`` + as needed.""" + dirname = os.path.dirname(filename) + if not dirname: + return + log.debug('os.makedirs(%r)', dirname) + os.makedirs(dirname, mode=mode, exist_ok=True) + + +def mapping_items(mapping, /): + """Return an iterator over the ``mapping`` items, + sort if it's a plain dict. 
+ + >>> list(mapping_items({'spam': 0, 'ham': 1, 'eggs': 2})) # doctest: +NO_EXE + [('eggs', 2), ('ham', 1), ('spam', 0)] + + >>> from collections import OrderedDict + >>> list(mapping_items(OrderedDict(enumerate(['spam', 'ham', 'eggs'])))) + [(0, 'spam'), (1, 'ham'), (2, 'eggs')] + """ + result = iter(mapping.items()) + if type(mapping) is dict: + result = iter(sorted(result)) + return result + + +@typing.overload +def promote_pathlike(filepath: typing.Union[os.PathLike, str], /) -> pathlib.Path: + """Return path object for path-like-object.""" + + +@typing.overload +def promote_pathlike(filepath: None, /) -> None: + """Return None for None.""" + + +@typing.overload +def promote_pathlike(filepath: typing.Union[os.PathLike, str, None], /, + ) -> typing.Optional[pathlib.Path]: + """Return path object or ``None`` depending on ``filepath``.""" + + +def promote_pathlike(filepath: typing.Union[os.PathLike, str, None] + ) -> typing.Optional[pathlib.Path]: + """Return path-like object ``filepath`` promoted into a path object. + + See also: + https://docs.python.org/3/glossary.html#term-path-like-object + """ + return pathlib.Path(filepath) if filepath is not None else None + + +def promote_pathlike_directory(directory: typing.Union[os.PathLike, str, None], /, *, + default: typing.Union[os.PathLike, str, None] = None, + ) -> pathlib.Path: + """Return path-like object ``directory`` promoted into a path object (default to ``os.curdir``). + + See also: + https://docs.python.org/3/glossary.html#term-path-like-object + """ + return pathlib.Path(directory if directory is not None + else default or os.curdir) + + +def deprecate_positional_args(*, + supported_number: int, + category: typing.Type[Warning] = PendingDeprecationWarning, + stacklevel: int = 1): + """Mark supported_number of positional arguments as the maximum. + + Args: + supported_number: Number of positional arguments + for which no warning is raised. + category: Type of Warning to raise + or None to return a nulldecorator + returning the undecorated function. + stacklevel: See :func:`warning.warn`. + + Returns: + Return a decorator raising a category warning + on more than supported_number positional args. 
+ + See also: + https://docs.python.org/3/library/exceptions.html#FutureWarning + https://docs.python.org/3/library/exceptions.html#DeprecationWarning + https://docs.python.org/3/library/exceptions.html#PendingDeprecationWarning + """ + assert supported_number > 0, f'supported_number at least one: {supported_number!r}' + + if category is None: + def nulldecorator(func): + """Return the undecorated function.""" + return func + + return nulldecorator + + assert issubclass(category, Warning) + + stacklevel += 1 + + def decorator(func): + signature = inspect.signature(func) + argnames = [name for name, param in signature.parameters.items() + if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD] + log.debug('deprecate positional args: %s.%s(%r)', + func.__module__, func.__qualname__, + argnames[supported_number:]) + + @functools.wraps(func) + def wrapper(*args, **kwargs): + if len(args) > supported_number: + call_args = zip(argnames, args) + supported = itertools.islice(call_args, supported_number) + supported = dict(supported) + deprecated = dict(call_args) + assert deprecated + func_name = func.__name__.lstrip('_') + func_name, sep, rest = func_name.partition('_legacy') + assert not set or not rest + wanted = ', '.join(f'{name}={value!r}' + for name, value in deprecated.items()) + warnings.warn(f'The signature of {func.__name__} will be reduced' + f' to {supported_number} positional args' + f' {list(supported)}: pass {wanted}' + ' as keyword arg(s)', + stacklevel=stacklevel, + category=category) + + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/.venv/Lib/site-packages/graphviz/backend/__init__.py b/.venv/Lib/site-packages/graphviz/backend/__init__.py new file mode 100644 index 00000000..f5b6677e --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/backend/__init__.py @@ -0,0 +1,20 @@ +"""Execute rendering and unflattening subprocesses, open files in viewer.""" + +from .dot_command import DOT_BINARY +from .execute import ExecutableNotFound, CalledProcessError +from .mixins import Render, Pipe, Unflatten, View +from .piping import pipe, pipe_string, pipe_lines, pipe_lines_string +from .rendering import render +from .unflattening import UNFLATTEN_BINARY, unflatten +from .upstream_version import version +from .viewing import view + +__all__ = ['DOT_BINARY', 'UNFLATTEN_BINARY', + 'render', + 'pipe', 'pipe_string', + 'pipe_lines', 'pipe_lines_string', + 'unflatten', + 'version', + 'view', + 'ExecutableNotFound', 'CalledProcessError', + 'Render', 'Pipe', 'Unflatten', 'View'] diff --git a/.venv/Lib/site-packages/graphviz/backend/dot_command.py b/.venv/Lib/site-packages/graphviz/backend/dot_command.py new file mode 100644 index 00000000..3a62a851 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/backend/dot_command.py @@ -0,0 +1,44 @@ +"""Check and assemble commands for running Graphviz ``dot``.""" + +import os +import pathlib +import typing + +from .. import exceptions +from .. import parameters + +__all__ = ['DOT_BINARY', 'command'] + +DOT_BINARY = pathlib.Path('dot') + + +def command(engine: str, format_: str, *, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None, + neato_no_op: typing.Union[bool, int, None] = None + ) -> typing.List[typing.Union[os.PathLike, str]]: + """Return ``subprocess.Popen`` argument list for rendering. 
+ + See also: + Upstream documentation: + - https://www.graphviz.org/doc/info/command.html#-K + - https://www.graphviz.org/doc/info/command.html#-T + - https://www.graphviz.org/doc/info/command.html#-n + """ + if formatter is not None and renderer is None: + raise exceptions.RequiredArgumentError('formatter given without renderer') + + parameters.verify_engine(engine, required=True) + parameters.verify_format(format_, required=True) + parameters.verify_renderer(renderer, required=False) + parameters.verify_formatter(formatter, required=False) + + output_format = [f for f in (format_, renderer, formatter) if f is not None] + output_format_flag = ':'.join(output_format) + + cmd = [DOT_BINARY, f'-K{engine}', f'-T{output_format_flag}'] + + if neato_no_op: + cmd.append(f'-n{neato_no_op:d}') + + return cmd diff --git a/.venv/Lib/site-packages/graphviz/backend/execute.py b/.venv/Lib/site-packages/graphviz/backend/execute.py new file mode 100644 index 00000000..a3447ff1 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/backend/execute.py @@ -0,0 +1,132 @@ +"""Run subprocesses with ``subprocess.run()`` and ``subprocess.Popen()``.""" + +import errno +import logging +import os +import subprocess +import sys +import typing + +from .. import _compat + +__all__ = ['run_check', 'ExecutableNotFound', 'CalledProcessError'] + + +log = logging.getLogger(__name__) + + +BytesOrStrIterator = typing.Union[typing.Iterator[bytes], + typing.Iterator[str]] + + +@typing.overload +def run_check(cmd: typing.Sequence[typing.Union[os.PathLike, str]], *, + input_lines: typing.Optional[typing.Iterator[bytes]] = ..., + encoding: None = ..., + quiet: bool = ..., + **kwargs) -> subprocess.CompletedProcess: + """Accept bytes input_lines with default ``encoding=None```.""" + + +@typing.overload +def run_check(cmd: typing.Sequence[typing.Union[os.PathLike, str]], *, + input_lines: typing.Optional[typing.Iterator[str]] = ..., + encoding: str, + quiet: bool = ..., + **kwargs) -> subprocess.CompletedProcess: + """Accept string input_lines when given ``encoding``.""" + + +@typing.overload +def run_check(cmd: typing.Sequence[typing.Union[os.PathLike, str]], *, + input_lines: typing.Optional[BytesOrStrIterator] = ..., + encoding: typing.Optional[str] = ..., + capture_output: bool = ..., + quiet: bool = ..., + **kwargs) -> subprocess.CompletedProcess: + """Accept bytes or string input_lines depending on ``encoding``.""" + + +def run_check(cmd: typing.Sequence[typing.Union[os.PathLike, str]], *, + input_lines: typing.Optional[BytesOrStrIterator] = None, + encoding: typing.Optional[str] = None, + quiet: bool = False, + **kwargs) -> subprocess.CompletedProcess: + """Run the command described by ``cmd`` + with ``check=True`` and return its completed process. + + Raises: + CalledProcessError: if the returncode of the subprocess is non-zero. 
+ """ + log.debug('run %r', cmd) + if not kwargs.pop('check', True): # pragma: no cover + raise NotImplementedError('check must be True or omited') + + if encoding is not None: + kwargs['encoding'] = encoding + + kwargs.setdefault('startupinfo', _compat.get_startupinfo()) + + try: + if input_lines is not None: + assert kwargs.get('input') is None + assert iter(input_lines) is input_lines + if kwargs.pop('capture_output'): + kwargs['stdout'] = kwargs['stderr'] = subprocess.PIPE + proc = _run_input_lines(cmd, input_lines, kwargs=kwargs) + else: + proc = subprocess.run(cmd, **kwargs) + except OSError as e: + if e.errno == errno.ENOENT: + raise ExecutableNotFound(cmd) from e + raise + + if not quiet and proc.stderr: + _write_stderr(proc.stderr) + + try: + proc.check_returncode() + except subprocess.CalledProcessError as e: + raise CalledProcessError(*e.args) + + return proc + + +def _run_input_lines(cmd, input_lines, *, kwargs): + popen = subprocess.Popen(cmd, stdin=subprocess.PIPE, **kwargs) + + stdin_write = popen.stdin.write + for line in input_lines: + stdin_write(line) + + stdout, stderr = popen.communicate() + return subprocess.CompletedProcess(popen.args, popen.returncode, + stdout=stdout, stderr=stderr) + + +def _write_stderr(stderr) -> None: + if isinstance(stderr, bytes): + stderr_encoding = (getattr(sys.stderr, 'encoding', None) + or sys.getdefaultencoding()) + stderr = stderr.decode(stderr_encoding) + + sys.stderr.write(stderr) + sys.stderr.flush() + return None + + +class ExecutableNotFound(RuntimeError): + """:exc:`RuntimeError` raised if the Graphviz executable is not found.""" + + _msg = ('failed to execute {!r}, ' + 'make sure the Graphviz executables are on your systems\' PATH') + + def __init__(self, args) -> None: + super().__init__(self._msg.format(*args)) + + +class CalledProcessError(subprocess.CalledProcessError): + """:exc:`~subprocess.CalledProcessError` raised if a subprocess ``returncode`` is not ``0``.""" # noqa: E501 + + def __str__(self) -> 'str': + return f'{super().__str__()} [stderr: {self.stderr!r}]' diff --git a/.venv/Lib/site-packages/graphviz/backend/mixins.py b/.venv/Lib/site-packages/graphviz/backend/mixins.py new file mode 100644 index 00000000..a90e2572 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/backend/mixins.py @@ -0,0 +1,76 @@ +"""Mixin classes used by Base subclasses to inherit backend functionality.""" + +import os +import typing + +from .. import parameters + +from . import piping +from . import rendering +from . import unflattening +from . 
import viewing + +__all__ = ['Render', 'Pipe', 'Unflatten', 'View'] + + +class Render(parameters.Parameters): + """Parameters for calling and calling ``graphviz.render()``.""" + + def _get_render_parameters(self, + outfile: typing.Union[os.PathLike, str, None] = None, + raise_if_result_exists: bool = False, + overwrite_source: bool = False, + **kwargs): + kwargs = self._get_parameters(**kwargs) + kwargs.update(outfile=outfile, + raise_if_result_exists=raise_if_result_exists, + overwrite_filepath=overwrite_source) + return [kwargs.pop('engine'), kwargs.pop('format')], kwargs + + @property + def _render(_): # noqa: N805 + """Simplify ``._render()`` mocking.""" + return rendering.render + + +class Pipe(parameters.Parameters): + """Parameters for calling and calling ``graphviz.pipe()``.""" + + _get_format = staticmethod(rendering.get_format) + + _get_filepath = staticmethod(rendering.get_filepath) + + def _get_pipe_parameters(self, **kwargs): + kwargs = self._get_parameters(**kwargs) + return [kwargs.pop('engine'), kwargs.pop('format')], kwargs + + @property + def _pipe_lines(_): # noqa: N805 + """Simplify ``._pipe_lines()`` mocking.""" + return piping.pipe_lines + + @property + def _pipe_lines_string(_): # noqa: N805 + """Simplify ``._pipe_lines_string()`` mocking.""" + return piping.pipe_lines_string + + +class Unflatten: + + @property + def _unflatten(_): # noqa: N805 + """Simplify ``._unflatten mocking.""" + return unflattening.unflatten + + +class View: + """Open filepath with its default viewing application + (platform-specific).""" + + _view_darwin = staticmethod(viewing.view_darwin) + + _view_freebsd = staticmethod(viewing.view_unixoid) + + _view_linux = staticmethod(viewing.view_unixoid) + + _view_windows = staticmethod(viewing.view_windows) diff --git a/.venv/Lib/site-packages/graphviz/backend/piping.py b/.venv/Lib/site-packages/graphviz/backend/piping.py new file mode 100644 index 00000000..e93e03c8 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/backend/piping.py @@ -0,0 +1,213 @@ +"""Pipe bytes, strings, or string iterators through Graphviz ``dot``.""" + +import typing + +from .. import _tools + +from . import dot_command +from . import execute + +__all__ = ['pipe', 'pipe_string', + 'pipe_lines', 'pipe_lines_string'] + + +@_tools.deprecate_positional_args(supported_number=3) +def pipe(engine: str, format: str, data: bytes, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None, + neato_no_op: typing.Union[bool, int, None] = None, + quiet: bool = False) -> bytes: + """Return ``data`` (``bytes``) piped through ``engine`` into ``format`` as ``bytes``. + + Args: + engine: Layout engine for rendering (``'dot'``, ``'neato'``, ...). + format: Output format for rendering (``'pdf'``, ``'png'``, ...). + data: Binary (encoded) DOT source bytes to render. + renderer: Output renderer (``'cairo'``, ``'gd'``, ...). + formatter: Output formatter (``'cairo'``, ``'gd'``, ...). + neato_no_op: Neato layout engine no-op flag. + quiet: Suppress ``stderr`` output from the layout subprocess. + + Returns: + Binary (encoded) stdout of the layout command. + + Raises: + ValueError: If ``engine``, ``format``, ``renderer``, or ``formatter`` + are unknown. + graphviz.RequiredArgumentError: If ``formatter`` is given + but ``renderer`` is None. + graphviz.ExecutableNotFound: If the Graphviz ``dot`` executable + is not found. + graphviz.CalledProcessError: If the returncode (exit status) + of the rendering ``dot`` subprocess is non-zero. 
+ + Example: + >>> doctest_mark_exe() + >>> import graphviz + >>> graphviz.pipe('dot', 'svg', b'graph { hello -- world }')[:14] + b' str: + """Return ``input_string`` piped through ``engine`` into ``format`` as string. + + Args: + engine: Layout engine for rendering (``'dot'``, ``'neato'``, ...). + format: Output format for rendering (``'pdf'``, ``'png'``, ...). + input_string: Binary (encoded) DOT source bytes to render. + encoding: Encoding to en/decode subprocess stdin and stdout (required). + renderer: Output renderer (``'cairo'``, ``'gd'``, ...). + formatter: Output formatter (``'cairo'``, ``'gd'``, ...). + neato_no_op: Neato layout engine no-op flag. + quiet: Suppress ``stderr`` output from the layout subprocess. + + Returns: + Decoded stdout of the layout command. + + Raises: + ValueError: If ``engine``, ``format``, ``renderer``, or ``formatter`` + are unknown. + graphviz.RequiredArgumentError: If ``formatter`` is given + but ``renderer`` is None. + graphviz.ExecutableNotFound: If the Graphviz ``dot`` executable + is not found. + graphviz.CalledProcessError: If the returncode (exit status) + of the rendering ``dot`` subprocess is non-zero. + + Example: + >>> doctest_mark_exe() + >>> import graphviz + >>> graphviz.pipe_string('dot', 'svg', 'graph { spam }', + ... encoding='ascii')[:14] + ' bytes: + r"""Return ``input_lines`` piped through ``engine`` into ``format`` as ``bytes``. + + Args: + engine: Layout engine for rendering (``'dot'``, ``'neato'``, ...). + format: Output format for rendering (``'pdf'``, ``'png'``, ...). + input_lines: DOT source lines to render (including final newline). + input_encoding: Encode input_lines for subprocess stdin (required). + renderer: Output renderer (``'cairo'``, ``'gd'``, ...). + formatter: Output formatter (``'cairo'``, ``'gd'``, ...). + neato_no_op: Neato layout engine no-op flag. + quiet: Suppress ``stderr`` output from the layout subprocess. + + Returns: + Binary stdout of the layout command. + + Raises: + ValueError: If ``engine``, ``format``, ``renderer``, or ``formatter`` + are unknown. + graphviz.RequiredArgumentError: If ``formatter`` is given + but ``renderer`` is None. + graphviz.ExecutableNotFound: If the Graphviz ``dot`` executable + is not found. + graphviz.CalledProcessError: If the returncode (exit status) + of the rendering ``dot`` subprocess is non-zero. + + Example: + >>> doctest_mark_exe() + >>> import graphviz + >>> graphviz.pipe_lines('dot', 'svg', iter(['graph { spam }\n']), + ... input_encoding='ascii')[:14] + b' str: + r"""Return ``input_lines`` piped through ``engine`` into ``format`` as string. + + Args: + engine: Layout engine for rendering (``'dot'``, ``'neato'``, ...). + format: Output format for rendering (``'pdf'``, ``'png'``, ...). + input_lines: DOT source lines to render (including final newline). + encoding: Encoding to en/decode subprocess stdin and stdout (required). + renderer: Output renderer (``'cairo'``, ``'gd'``, ...). + formatter: Output formatter (``'cairo'``, ``'gd'``, ...). + neato_no_op: Neato layout engine no-op flag. + quiet: Suppress ``stderr`` output from the layout subprocess. + + Returns: + Decoded stdout of the layout command. + + Raises: + ValueError: If ``engine``, ``format``, ``renderer``, or ``formatter`` + are unknown. + graphviz.RequiredArgumentError: If ``formatter`` is given + but ``renderer`` is None. + graphviz.ExecutableNotFound: If the Graphviz ``dot`` executable + is not found. 
+ graphviz.CalledProcessError: If the returncode (exit status) + of the rendering ``dot`` subprocess is non-zero. + + Example: + >>> doctest_mark_exe() + >>> import graphviz + >>> graphviz.pipe_lines_string('dot', 'svg', iter(['graph { spam }\n']), + ... encoding='ascii')[:14] + ' str: + """Return format inferred from outfile suffix and/or given ``format``. + + Args: + outfile: Path for the rendered output file. + format: Output format for rendering (``'pdf'``, ``'png'``, ...). + + Returns: + The given ``format`` falling back to the inferred format. + + Warns: + graphviz.UnknownSuffixWarning: If the suffix of ``outfile`` + is empty/unknown. + graphviz.FormatSuffixMismatchWarning: If the suffix of ``outfile`` + does not match the given ``format``. + """ + try: + inferred_format = infer_format(outfile) + except ValueError: + if format is None: + msg = ('cannot infer rendering format' + f' from suffix {outfile.suffix!r}' + f' of outfile: {os.fspath(outfile)!r}' + ' (provide format or outfile with a suffix' + f' from {get_supported_suffixes()!r})') + raise exceptions.RequiredArgumentError(msg) + + warnings.warn(f'unknown outfile suffix {outfile.suffix!r}' + f' (expected: {"." + format!r})', + category=exceptions.UnknownSuffixWarning) + return format + else: + assert inferred_format is not None + if format is not None and format.lower() != inferred_format: + warnings.warn(f'expected format {inferred_format!r} from outfile' + f' differs from given format: {format!r}', + category=exceptions.FormatSuffixMismatchWarning) + return format + + return inferred_format + + +def get_supported_suffixes() -> typing.List[str]: + """Return a sorted list of supported outfile suffixes for exception/warning messages. + + >>> get_supported_suffixes() # doctest: +ELLIPSIS + ['.bmp', ...] + """ + return [f'.{format}' for format in get_supported_formats()] + + +def get_supported_formats() -> typing.List[str]: + """Return a sorted list of supported formats for exception/warning messages. + + >>> get_supported_formats() # doctest: +ELLIPSIS + ['bmp', ...] + """ + return sorted(parameters.FORMATS) + + +def infer_format(outfile: pathlib.Path) -> str: + """Return format inferred from outfile suffix. + + Args: + outfile: Path for the rendered output file. + + Returns: + The inferred format. + + Raises: + ValueError: If the suffix of ``outfile`` is empty/unknown. + + >>> infer_format(pathlib.Path('spam.pdf')) # doctest: +NO_EXE + 'pdf' + + >>> infer_format(pathlib.Path('spam.gv.svg')) + 'svg' + + >>> infer_format(pathlib.Path('spam.PNG')) + 'png' + + >>> infer_format(pathlib.Path('spam')) + Traceback (most recent call last): + ... + ValueError: cannot infer rendering format from outfile: 'spam' (missing suffix) + + >>> infer_format(pathlib.Path('spam.wav')) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + Traceback (most recent call last): + ... 
+ ValueError: cannot infer rendering format from suffix '.wav' of outfile: 'spam.wav' + (unknown format: 'wav', provide outfile with a suffix from ['.bmp', ...]) + """ + if not outfile.suffix: + raise ValueError('cannot infer rendering format from outfile:' + f' {os.fspath(outfile)!r} (missing suffix)') + + start, sep, format_ = outfile.suffix.partition('.') + assert sep and not start, f"{outfile.suffix!r}.startswith('.')" + format_ = format_.lower() + + try: + parameters.verify_format(format_) + except ValueError: + raise ValueError('cannot infer rendering format' + f' from suffix {outfile.suffix!r}' + f' of outfile: {os.fspath(outfile)!r}' + f' (unknown format: {format_!r},' + ' provide outfile with a suffix' + f' from {get_supported_suffixes()!r})') + return format_ + + +def get_outfile(filepath: typing.Union[os.PathLike, str], *, + format: str, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None) -> pathlib.Path: + """Return ``filepath`` + ``[[.formatter].renderer].format``. + + See also: + https://www.graphviz.org/doc/info/command.html#-O + """ + filepath = _tools.promote_pathlike(filepath) + + parameters.verify_format(format, required=True) + parameters.verify_renderer(renderer, required=False) + parameters.verify_formatter(formatter, required=False) + + suffix_args = (formatter, renderer, format) + suffix = '.'.join(a for a in suffix_args if a is not None) + return filepath.with_suffix(f'{filepath.suffix}.{suffix}') + + +def get_filepath(outfile: typing.Union[os.PathLike, str]) -> pathlib.Path: + """Return ``outfile.with_suffix('.gv')``.""" + outfile = _tools.promote_pathlike(outfile) + return outfile.with_suffix(f'.{DEFAULT_SOURCE_EXTENSION}') + + +@typing.overload +def render(engine: str, + format: str, + filepath: typing.Union[os.PathLike, str], + renderer: typing.Optional[str] = ..., + formatter: typing.Optional[str] = ..., + neato_no_op: typing.Union[bool, int, None] = ..., + quiet: bool = ..., *, + outfile: typing.Union[os.PathLike, str, None] = ..., + raise_if_result_exists: bool = ..., + overwrite_filepath: bool = ...) -> str: + """Require ``format`` and ``filepath`` with default ``outfile=None``.""" + + +@typing.overload +def render(engine: str, + format: typing.Optional[str] = ..., + filepath: typing.Union[os.PathLike, str, None] = ..., + renderer: typing.Optional[str] = ..., + formatter: typing.Optional[str] = ..., + neato_no_op: typing.Union[bool, int, None] = ..., + quiet: bool = False, *, + outfile: typing.Union[os.PathLike, str, None] = ..., + raise_if_result_exists: bool = ..., + overwrite_filepath: bool = ...) -> str: + """Optional ``format`` and ``filepath`` with given ``outfile``.""" + + +@typing.overload +def render(engine: str, + format: typing.Optional[str] = ..., + filepath: typing.Union[os.PathLike, str, None] = ..., + renderer: typing.Optional[str] = ..., + formatter: typing.Optional[str] = ..., + neato_no_op: typing.Union[bool, int, None] = ..., + quiet: bool = False, *, + outfile: typing.Union[os.PathLike, str, None] = ..., + raise_if_result_exists: bool = ..., + overwrite_filepath: bool = ...) 
-> str:
+    """Required/optional ``format`` and ``filepath`` depending on ``outfile``."""
+
+
+@_tools.deprecate_positional_args(supported_number=3)
+def render(engine: str,
+           format: typing.Optional[str] = None,
+           filepath: typing.Union[os.PathLike, str, None] = None,
+           renderer: typing.Optional[str] = None,
+           formatter: typing.Optional[str] = None,
+           neato_no_op: typing.Union[bool, int, None] = None,
+           quiet: bool = False, *,
+           outfile: typing.Union[os.PathLike, str, None] = None,
+           raise_if_result_exists: bool = False,
+           overwrite_filepath: bool = False) -> str:
+    r"""Render file with ``engine`` into ``format`` and return result filename.
+
+    Args:
+        engine: Layout engine for rendering (``'dot'``, ``'neato'``, ...).
+        format: Output format for rendering (``'pdf'``, ``'png'``, ...).
+            Can be omitted if an ``outfile`` with a known ``format`` is given,
+            i.e. if ``outfile`` ends with a known ``.{format}`` suffix.
+        filepath: Path to the DOT source file to render.
+            Can be omitted if ``outfile`` is given,
+            in which case it defaults to ``outfile.with_suffix('.gv')``.
+        renderer: Output renderer (``'cairo'``, ``'gd'``, ...).
+        formatter: Output formatter (``'cairo'``, ``'gd'``, ...).
+        neato_no_op: Neato layout engine no-op flag.
+        quiet: Suppress ``stderr`` output from the layout subprocess.
+        outfile: Path for the rendered output file.
+        raise_if_result_exists: Raise :exc:`graphviz.FileExistsError`
+            if the result file exists.
+        overwrite_filepath: Allow ``dot`` to write to the file it reads from.
+            Incompatible with ``raise_if_result_exists``.
+
+    Returns:
+        The (possibly relative) path of the rendered file.
+
+    Raises:
+        ValueError: If ``engine``, ``format``, ``renderer``, or ``formatter``
+            are unknown.
+        graphviz.RequiredArgumentError: If ``format`` or ``filepath`` are None
+            unless ``outfile`` is given.
+        graphviz.RequiredArgumentError: If ``formatter`` is given
+            but ``renderer`` is None.
+        ValueError: If ``outfile`` and ``filepath`` are the same file
+            unless ``overwrite_filepath=True``.
+        graphviz.ExecutableNotFound: If the Graphviz ``dot`` executable
+            is not found.
+        graphviz.CalledProcessError: If the returncode (exit status)
+            of the rendering ``dot`` subprocess is non-zero.
+        graphviz.FileExistsError: If ``raise_if_result_exists``
+            and the result file exists.
+
+    Warns:
+        graphviz.UnknownSuffixWarning: If the suffix of ``outfile``
+            is empty or unknown.
+        graphviz.FormatSuffixMismatchWarning: If the suffix of ``outfile``
+            does not match the given ``format``.
+
+    Example:
+        >>> doctest_mark_exe()
+        >>> import pathlib
+        >>> import graphviz
+        >>> assert pathlib.Path('doctest-output/spam.gv').write_text('graph { spam }') == 14
+        >>> graphviz.render('dot', 'png', 'doctest-output/spam.gv').replace('\\', '/')
+        'doctest-output/spam.gv.png'
+        >>> graphviz.render('dot', filepath='doctest-output/spam.gv',
+        ...                 outfile='doctest-output/spam.png').replace('\\', '/')
+        'doctest-output/spam.png'
+        >>> graphviz.render('dot', outfile='doctest-output/spam.pdf').replace('\\', '/')
+        'doctest-output/spam.pdf'
+
+    Note:
+        The layout command is started from the directory of ``filepath``,
+        so that references to external files
+        (e.g. ``[image=images/camelot.png]``)
+        can be given as paths relative to the DOT source file.
+ + See also: + Upstream docs: https://www.graphviz.org/doc/info/command.html + """ + if raise_if_result_exists and overwrite_filepath: + raise ValueError('overwrite_filepath cannot be combined' + ' with raise_if_result_exists') + + filepath, outfile = map(_tools.promote_pathlike, (filepath, outfile)) + + if outfile is not None: + format = get_format(outfile, format=format) + + if filepath is None: + filepath = get_filepath(outfile) + + if (not overwrite_filepath and outfile.name == filepath.name + and outfile.resolve() == filepath.resolve()): # noqa: E129 + raise ValueError(f'outfile {outfile.name!r} must be different' + f' from input file {filepath.name!r}' + ' (pass overwrite_filepath=True to override)') + + outfile_arg = (outfile.resolve() if outfile.parent != filepath.parent + else outfile.name) + + # https://www.graphviz.org/doc/info/command.html#-o + args = ['-o', outfile_arg, filepath.name] + elif filepath is None: + raise exceptions.RequiredArgumentError('filepath: (required if outfile is not given,' + f' got {filepath!r})') + elif format is None: + raise exceptions.RequiredArgumentError('format: (required if outfile is not given,' + f' got {format!r})') + else: + outfile = get_outfile(filepath, + format=format, + renderer=renderer, + formatter=formatter) + # https://www.graphviz.org/doc/info/command.html#-O + args = ['-O', filepath.name] + + cmd = dot_command.command(engine, format, + renderer=renderer, + formatter=formatter, + neato_no_op=neato_no_op) + + if raise_if_result_exists and os.path.exists(outfile): + raise exceptions.FileExistsError(f'output file exists: {os.fspath(outfile)!r}') + + cmd += args + + assert filepath is not None, 'work around pytype false alarm' + + execute.run_check(cmd, + cwd=filepath.parent if filepath.parent.parts else None, + quiet=quiet, + capture_output=True) + + return os.fspath(outfile) diff --git a/.venv/Lib/site-packages/graphviz/backend/unflattening.py b/.venv/Lib/site-packages/graphviz/backend/unflattening.py new file mode 100644 index 00000000..a386b8cd --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/backend/unflattening.py @@ -0,0 +1,63 @@ +"""Pipe DOT source code through ``unflatten``.""" + +import pathlib +import typing + +from ..encoding import DEFAULT_ENCODING +from .. import _tools +from .. import exceptions + +from . import execute + +__all__ = ['UNFLATTEN_BINARY', 'unflatten'] + +UNFLATTEN_BINARY = pathlib.Path('unflatten') + + +@_tools.deprecate_positional_args(supported_number=1) +def unflatten(source: str, + stagger: typing.Optional[int] = None, + fanout: bool = False, + chain: typing.Optional[int] = None, + encoding: str = DEFAULT_ENCODING) -> str: + """Return DOT ``source`` piped through ``unflatten`` preprocessor as string. + + Args: + source: DOT source to process + (improve layout aspect ratio). + stagger: Stagger the minimum length of leaf edges + between 1 and this small integer. + fanout: Fanout nodes with indegree = outdegree = 1 + when staggering (requires ``stagger``). + chain: Form disconnected nodes into chains of up to this many nodes. + encoding: Encoding to encode unflatten stdin and decode its stdout. + + Returns: + Decoded stdout of the Graphviz unflatten command. + + Raises: + graphviz.RequiredArgumentError: If ``fanout`` is given + but no ``stagger``. + graphviz.ExecutableNotFound: If the Graphviz 'unflatten' executable + is not found. + graphviz.CalledProcessError: If the returncode (exit status) + of the unflattening 'unflatten' subprocess is non-zero. 
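+
+    Example:
+        A minimal editorial sketch added here for illustration; it assumes
+        the Graphviz ``unflatten`` executable is installed and that the
+        package-level ``graphviz.unflatten()`` re-export of this function
+        is used:
+
+        >>> doctest_mark_exe()
+        >>> import graphviz
+        >>> processed = graphviz.unflatten('digraph {1 -> 2; 1 -> 3; 1 -> 4}',
+        ...                                stagger=2)
+        >>> isinstance(processed, str)
+        True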
+ + See also: + Upstream documentation: + https://www.graphviz.org/pdf/unflatten.1.pdf + """ + if fanout and stagger is None: + raise exceptions.RequiredArgumentError('fanout given without stagger') + + cmd = [UNFLATTEN_BINARY] + if stagger is not None: + cmd += ['-l', str(stagger)] + if fanout: + cmd.append('-f') + if chain is not None: + cmd += ['-c', str(chain)] + + proc = execute.run_check(cmd, input=source, encoding=encoding, + capture_output=True) + return proc.stdout diff --git a/.venv/Lib/site-packages/graphviz/backend/upstream_version.py b/.venv/Lib/site-packages/graphviz/backend/upstream_version.py new file mode 100644 index 00000000..df2c1e61 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/backend/upstream_version.py @@ -0,0 +1,62 @@ +"""Return the version number from running ``dot -V``.""" + +import logging +import re +import subprocess +import typing + +from . import dot_command +from . import execute + +VERSION_PATTERN = re.compile(r''' + graphviz[ ] + version[ ] + (\d+)\.(\d+) + (?:\.(\d+) + (?: + ~dev\.\d{8}\.\d{4} + | + \.(\d+) + )? + )? + [ ] + ''', re.VERBOSE) + + +log = logging.getLogger(__name__) + + +def version() -> typing.Tuple[int, ...]: + """Return the upstream version number tuple from ``stderr`` of ``dot -V``. + + Returns: + Two, three, or four ``int`` version ``tuple``. + + Raises: + graphviz.ExecutableNotFound: If the Graphviz executable is not found. + graphviz.CalledProcessError: If the exit status is non-zero. + RuntimeError: If the output cannot be parsed into a version number. + + Example: + >>> doctest_mark_exe() + >>> import graphviz + >>> graphviz.version() # doctest: +ELLIPSIS + (...) + + Note: + Ignores the ``~dev.`` portion of development versions. + + See also: + Upstream release version entry format: + https://gitlab.com/graphviz/graphviz/-/blob/f94e91ba819cef51a4b9dcb2d76153684d06a913/gen_version.py#L17-20 + """ + cmd = [dot_command.DOT_BINARY, '-V'] + proc = execute.run_check(cmd, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + encoding='ascii') + + ma = VERSION_PATTERN.search(proc.stdout) + if ma is None: + raise RuntimeError(f'cannot parse {cmd!r} output: {proc.stdout!r}') + + return tuple(int(d) for d in ma.groups() if d is not None) diff --git a/.venv/Lib/site-packages/graphviz/backend/viewing.py b/.venv/Lib/site-packages/graphviz/backend/viewing.py new file mode 100644 index 00000000..fde74a66 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/backend/viewing.py @@ -0,0 +1,71 @@ +"""Open files in platform-specific default viewing application.""" + +import logging +import os +import platform +import subprocess +import typing + +from .. import _tools + +__all__ = ['view'] + +PLATFORM = platform.system().lower() + + +log = logging.getLogger(__name__) + + +@_tools.deprecate_positional_args(supported_number=1) +def view(filepath: typing.Union[os.PathLike, str], + quiet: bool = False) -> None: + """Open filepath with its default viewing application (platform-specific). + + Args: + filepath: Path to the file to open in viewer. + quiet: Suppress ``stderr`` output + from the viewer process (ineffective on Windows). + + Raises: + RuntimeError: If the current platform is not supported. + + Note: + There is no option to wait for the application to close, + and no way to retrieve the application's exit status. 
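+
+    Example:
+        A minimal editorial sketch; it is skipped under doctest because it
+        would launch a desktop application for the given file:
+
+        >>> import graphviz
+        >>> graphviz.view('doctest-output/spam.pdf')  # doctest: +SKIP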
+ """ + try: + view_func = getattr(view, PLATFORM) + except AttributeError: + raise RuntimeError(f'platform {PLATFORM!r} not supported') + view_func(filepath, quiet=quiet) + + +@_tools.attach(view, 'darwin') +def view_darwin(filepath: typing.Union[os.PathLike, str], *, + quiet: bool) -> None: + """Open filepath with its default application (mac).""" + cmd = ['open', filepath] + log.debug('view: %r', cmd) + kwargs = {'stderr': subprocess.DEVNULL} if quiet else {} + subprocess.Popen(cmd, **kwargs) + + +@_tools.attach(view, 'linux') +@_tools.attach(view, 'freebsd') +def view_unixoid(filepath: typing.Union[os.PathLike, str], *, + quiet: bool) -> None: + """Open filepath in the user's preferred application (linux, freebsd).""" + cmd = ['xdg-open', filepath] + log.debug('view: %r', cmd) + kwargs = {'stderr': subprocess.DEVNULL} if quiet else {} + subprocess.Popen(cmd, **kwargs) + + +@_tools.attach(view, 'windows') +def view_windows(filepath: typing.Union[os.PathLike, str], *, + quiet: bool) -> None: + """Start filepath with its associated application (windows).""" + # TODO: implement quiet=True + filepath = os.path.normpath(filepath) + log.debug('view: %r', filepath) + os.startfile(filepath) # pytype: disable=module-attr diff --git a/.venv/Lib/site-packages/graphviz/base.py b/.venv/Lib/site-packages/graphviz/base.py new file mode 100644 index 00000000..e2bcf554 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/base.py @@ -0,0 +1,32 @@ +"""Iterables of DOT source code lines (including final newline).""" + +import typing + +from . import copying + +__all__ = ['Base'] + + +class LineIterable: + """Iterable of DOT Source code lines + (mimics ``file`` objects in text mode).""" + + def __iter__(self) -> typing.Iterator[str]: # pragma: no cover + r"""Yield the generated DOT source line by line. + + Yields: Line ending with a newline (``'\n'``). + """ + raise NotImplementedError('to be implemented by concrete subclasses') + + +# Common base interface for all exposed classes +class Base(LineIterable, copying.CopyBase): + """LineIterator with ``.source`` attribute, that it returns for ``str()``.""" + + @property + def source(self) -> str: # pragma: no cover + raise NotImplementedError('to be implemented by concrete subclasses') + + def __str__(self) -> str: + """The DOT source code as string.""" + return self.source diff --git a/.venv/Lib/site-packages/graphviz/copying.py b/.venv/Lib/site-packages/graphviz/copying.py new file mode 100644 index 00000000..e031364f --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/copying.py @@ -0,0 +1,20 @@ +"""Create new instance copies with cooperative ``super()`` calls.""" + +__all__ = ['CopyBase'] + + +class CopyBase: + """Create new instance copies with cooperative ``super()`` calls.""" + + def copy(self): + """Return a copied instance of the object. + + Returns: + An independent copy of the current object. + """ + kwargs = self._copy_kwargs() + return self.__class__(**kwargs) + + def _copy_kwargs(self, **kwargs): + """Return the kwargs to create a copy of the instance.""" + return kwargs diff --git a/.venv/Lib/site-packages/graphviz/dot.py b/.venv/Lib/site-packages/graphviz/dot.py new file mode 100644 index 00000000..70f90c00 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/dot.py @@ -0,0 +1,344 @@ +"""Create DOT code with method-calls.""" + +import contextlib +import typing + +from . import _tools +from . import base +from . 
import quoting + +__all__ = ['GraphSyntax', 'DigraphSyntax', 'Dot'] + + +def comment(line: str) -> str: + """Return comment header line.""" + return f'// {line}\n' + + +def graph_head(name: str) -> str: + """Return DOT graph head line.""" + return f'graph {name}{{\n' + + +def digraph_head(name: str) -> str: + """Return DOT digraph head line.""" + return f'digraph {name}{{\n' + + +def graph_edge(*, tail: str, head: str, attr: str) -> str: + """Return DOT graph edge statement line.""" + return f'\t{tail} -- {head}{attr}\n' + + +def digraph_edge(*, tail: str, head: str, attr: str) -> str: + """Return DOT digraph edge statement line.""" + return f'\t{tail} -> {head}{attr}\n' + + +class GraphSyntax: + """DOT graph head and edge syntax.""" + + _head = staticmethod(graph_head) + + _edge = staticmethod(graph_edge) + + +class DigraphSyntax: + """DOT digraph head and edge syntax.""" + + _head = staticmethod(digraph_head) + + _edge = staticmethod(digraph_edge) + + +def subgraph(name: str) -> str: + """Return DOT subgraph head line.""" + return f'subgraph {name}{{\n' + + +def subgraph_plain(name: str) -> str: + """Return plain DOT subgraph head line.""" + return f'{name}{{\n' + + +def node(left: str, right: str) -> str: + """Return DOT node statement line.""" + return f'\t{left}{right}\n' + + +class Dot(quoting.Quote, base.Base): + """Assemble DOT source code.""" + + directed: bool + + _comment = staticmethod(comment) + + @staticmethod + def _head(name: str) -> str: # pragma: no cover + """Return DOT head line.""" + raise NotImplementedError('must be implemented by concrete subclasses') + + @classmethod + def _head_strict(cls, name: str) -> str: + """Return DOT strict head line.""" + return f'strict {cls._head(name)}' + + _tail = '}\n' + + _subgraph = staticmethod(subgraph) + + _subgraph_plain = staticmethod(subgraph_plain) + + _node = _attr = staticmethod(node) + + @classmethod + def _attr_plain(cls, left: str) -> str: + return cls._attr(left, '') + + @staticmethod + def _edge(*, tail: str, head: str, attr: str) -> str: # pragma: no cover + """Return DOT edge statement line.""" + raise NotImplementedError('must be implemented by concrete subclasses') + + @classmethod + def _edge_plain(cls, *, tail: str, head: str) -> str: + """Return plain DOT edge statement line.""" + return cls._edge(tail=tail, head=head, attr='') + + def __init__(self, *, + name: typing.Optional[str] = None, + comment: typing.Optional[str] = None, + graph_attr=None, node_attr=None, edge_attr=None, body=None, + strict: bool = False, **kwargs) -> None: + super().__init__(**kwargs) + + self.name = name + """str: DOT source identifier for the ``graph`` or ``digraph`` statement.""" + + self.comment = comment + """str: DOT source comment for the first source line.""" + + self.graph_attr = dict(graph_attr) if graph_attr is not None else {} + """~typing.Dict[str, str]: Attribute-value pairs applying to the graph.""" + + self.node_attr = dict(node_attr) if node_attr is not None else {} + """~typing.Dict[str, str]: Attribute-value pairs applying to all nodes.""" + + self.edge_attr = dict(edge_attr) if edge_attr is not None else {} + """~typing.Dict[str, str]: Attribute-value pairs applying to all edges.""" + + self.body = list(body) if body is not None else [] + """~typing.List[str]: Verbatim DOT source lines including final newline.""" + + self.strict = strict + """bool: Rendering should merge multi-edges.""" + + def _copy_kwargs(self, **kwargs): + """Return the kwargs to create a copy of the instance.""" + return 
super()._copy_kwargs(name=self.name,
+                                    comment=self.comment,
+                                    graph_attr=dict(self.graph_attr),
+                                    node_attr=dict(self.node_attr),
+                                    edge_attr=dict(self.edge_attr),
+                                    body=list(self.body),
+                                    strict=self.strict)
+
+    @_tools.deprecate_positional_args(supported_number=1)
+    def clear(self, keep_attrs: bool = False) -> None:
+        """Reset content to an empty body, clear graph/node/edge_attr mappings.
+
+        Args:
+            keep_attrs (bool): preserve graph/node/edge_attr mappings
+        """
+        if not keep_attrs:
+            for a in (self.graph_attr, self.node_attr, self.edge_attr):
+                a.clear()
+        self.body.clear()
+
+    @_tools.deprecate_positional_args(supported_number=1)
+    def __iter__(self, subgraph: bool = False) -> typing.Iterator[str]:
+        r"""Yield the DOT source code line by line (as graph or subgraph).
+
+        Yields: Line ending with a newline (``'\n'``).
+        """
+        if self.comment:
+            yield self._comment(self.comment)
+
+        if subgraph:
+            if self.strict:
+                raise ValueError('subgraphs cannot be strict')
+            head = self._subgraph if self.name else self._subgraph_plain
+        else:
+            head = self._head_strict if self.strict else self._head
+        yield head(self._quote(self.name) + ' ' if self.name else '')
+
+        for kw in ('graph', 'node', 'edge'):
+            attrs = getattr(self, f'{kw}_attr')
+            if attrs:
+                yield self._attr(kw, self._attr_list(None, kwargs=attrs))
+
+        yield from self.body
+
+        yield self._tail
+
+    @_tools.deprecate_positional_args(supported_number=3)
+    def node(self, name: str,
+             label: typing.Optional[str] = None,
+             _attributes=None, **attrs) -> None:
+        """Create a node.
+
+        Args:
+            name: Unique identifier for the node inside the source.
+            label: Caption to be displayed (defaults to the node ``name``).
+            attrs: Any additional node attributes (must be strings).
+
+        Attention:
+            When rendering ``label``, backslash-escapes
+            and strings of the form ``<...>`` have a special meaning.
+            See the sections :ref:`backslash-escapes` and
+            :ref:`quoting-and-html-like-labels` in the user guide for details.
+        """
+        name = self._quote(name)
+        attr_list = self._attr_list(label, kwargs=attrs, attributes=_attributes)
+        line = self._node(name, attr_list)
+        self.body.append(line)
+
+    @_tools.deprecate_positional_args(supported_number=4)
+    def edge(self, tail_name: str, head_name: str,
+             label: typing.Optional[str] = None,
+             _attributes=None, **attrs) -> None:
+        """Create an edge between two nodes.
+
+        Args:
+            tail_name: Start node identifier
+                (format: ``node[:port[:compass]]``).
+            head_name: End node identifier
+                (format: ``node[:port[:compass]]``).
+            label: Caption to be displayed near the edge.
+            attrs: Any additional edge attributes (must be strings).
+
+        Note:
+            The ``tail_name`` and ``head_name`` strings are separated
+            by (optional) colon(s) into ``node`` name, ``port`` name,
+            and ``compass`` (e.g. ``sw``).
+            See the details in the User Guide.
+
+        Attention:
+            When rendering ``label``, backslash-escapes
+            and strings of the form ``<...>`` have a special meaning.
+            See the sections :ref:`backslash-escapes` and
+            :ref:`quoting-and-html-like-labels` in the user guide for details.
+        """
+        tail_name = self._quote_edge(tail_name)
+        head_name = self._quote_edge(head_name)
+        attr_list = self._attr_list(label, kwargs=attrs, attributes=_attributes)
+        line = self._edge(tail=tail_name, head=head_name, attr=attr_list)
+        self.body.append(line)
+
+    def edges(self, tail_head_iter) -> None:
+        """Create a bunch of edges.
+
+        Args:
+            tail_head_iter: Iterable of ``(tail_name, head_name)`` pairs
+                            (format: ``node[:port[:compass]]``).
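+
+        Example:
+            A minimal editorial sketch added for illustration, using only
+            the public :class:`.Graph` API:
+
+            >>> import graphviz
+            >>> g = graphviz.Graph()
+            >>> g.edges([('spam', 'eggs'), ('spam', 'ham')])
+            >>> print(g.source)  # doctest: +NORMALIZE_WHITESPACE
+            graph {
+                spam -- eggs
+                spam -- ham
+            }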
+ + + Note: + The ``tail_name`` and ``head_name`` strings are separated + by (optional) colon(s) into ``node`` name, ``port`` name, + and ``compass`` (e.g. ``sw``). + See :ref:`details in the User Guide `. + """ + edge = self._edge_plain + quote = self._quote_edge + self.body += [edge(tail=quote(t), head=quote(h)) + for t, h in tail_head_iter] + + @_tools.deprecate_positional_args(supported_number=2) + def attr(self, kw: typing.Optional[str] = None, + _attributes=None, **attrs) -> None: + """Add a general or graph/node/edge attribute statement. + + Args: + kw: Attributes target + (``None`` or ``'graph'``, ``'node'``, ``'edge'``). + attrs: Attributes to be set (must be strings, may be empty). + + See the :ref:`usage examples in the User Guide `. + """ + if kw is not None and kw.lower() not in ('graph', 'node', 'edge'): + raise ValueError('attr statement must target graph, node, or edge:' + f' {kw!r}') + if attrs or _attributes: + if kw is None: + a_list = self._a_list(None, kwargs=attrs, attributes=_attributes) + line = self._attr_plain(a_list) + else: + attr_list = self._attr_list(None, kwargs=attrs, attributes=_attributes) + line = self._attr(kw, attr_list) + self.body.append(line) + + @_tools.deprecate_positional_args(supported_number=2) + def subgraph(self, graph=None, + name: typing.Optional[str] = None, + comment: typing.Optional[str] = None, + graph_attr=None, node_attr=None, edge_attr=None, + body=None): + """Add the current content of the given sole ``graph`` argument + as subgraph or return a context manager + returning a new graph instance + created with the given (``name``, ``comment``, etc.) arguments + whose content is added as subgraph + when leaving the context manager's ``with``-block. + + Args: + graph: An instance of the same kind + (:class:`.Graph`, :class:`.Digraph`) as the current graph + (sole argument in non-with-block use). + name: Subgraph name (``with``-block use). + comment: Subgraph comment (``with``-block use). + graph_attr: Subgraph-level attribute-value mapping + (``with``-block use). + node_attr: Node-level attribute-value mapping + (``with``-block use). + edge_attr: Edge-level attribute-value mapping + (``with``-block use). + body: Verbatim lines to add to the subgraph ``body`` + (``with``-block use). + + See the :ref:`usage examples in the User Guide `. + + When used as a context manager, the returned new graph instance + uses ``strict=None`` and the parent graph's values + for ``directory``, ``format``, ``engine``, and ``encoding`` by default. + + Note: + If the ``name`` of the subgraph begins with + ``'cluster'`` (all lowercase) + the layout engine will treat it as a special cluster subgraph. 
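+
+        Example:
+            A minimal editorial sketch of the ``with``-block form added for
+            illustration, using only the public :class:`.Graph` API:
+
+            >>> import graphviz
+            >>> p = graphviz.Graph(name='parent')
+            >>> with p.subgraph(name='cluster_child') as c:
+            ...     c.node('spam')
+            >>> 'subgraph cluster_child' in p.source
+            True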
+ """ + if graph is None: + kwargs = self._copy_kwargs() + kwargs.pop('filename', None) + kwargs.update(name=name, comment=comment, + graph_attr=graph_attr, node_attr=node_attr, edge_attr=edge_attr, + body=body, strict=None) + subgraph = self.__class__(**kwargs) + + @contextlib.contextmanager + def subgraph_contextmanager(*, parent): + """Return subgraph and add to parent on exit.""" + yield subgraph + parent.subgraph(subgraph) + + return subgraph_contextmanager(parent=self) + + args = [name, comment, graph_attr, node_attr, edge_attr, body] + if not all(a is None for a in args): + raise ValueError('graph must be sole argument of subgraph()') + + if graph.directed != self.directed: + raise ValueError(f'{self!r} cannot add subgraph of different kind:' + f' {graph!r}') + + self.body += [f'\t{line}' for line in graph.__iter__(subgraph=True)] diff --git a/.venv/Lib/site-packages/graphviz/encoding.py b/.venv/Lib/site-packages/graphviz/encoding.py new file mode 100644 index 00000000..d00b5d91 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/encoding.py @@ -0,0 +1,41 @@ +"""Encoding parameter handling and default.""" + +import typing + +import codecs +import locale + +from . import copying + +__all__ = ['DEFAULT_ENCODING', 'Encoding'] + +DEFAULT_ENCODING = 'utf-8' + + +class Encoding(copying.CopyBase): + """Encoding used for input and output with ``'utf-8'`` default.""" + + _encoding = DEFAULT_ENCODING + + def __init__(self, *, encoding: typing.Optional[str] = DEFAULT_ENCODING, + **kwargs) -> None: + super().__init__(**kwargs) + + self.encoding = encoding + + def _copy_kwargs(self, **kwargs): + """Return the kwargs to create a copy of the instance.""" + return super()._copy_kwargs(encoding=self._encoding, **kwargs) + + @property + def encoding(self) -> str: + """The encoding for the saved source file.""" + return self._encoding + + @encoding.setter + def encoding(self, encoding: typing.Optional[str]) -> None: + if encoding is None: + encoding = locale.getpreferredencoding() + + codecs.lookup(encoding) # raise early + self._encoding = encoding diff --git a/.venv/Lib/site-packages/graphviz/exceptions.py b/.venv/Lib/site-packages/graphviz/exceptions.py new file mode 100644 index 00000000..dc0dbc1f --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/exceptions.py @@ -0,0 +1,31 @@ +"""Commonly used exception classes.""" + +from .backend.execute import ExecutableNotFound, CalledProcessError + +__all__ = ['ExecutableNotFound', 'CalledProcessError', + 'RequiredArgumentError', 'FileExistsError', + 'UnknownSuffixWarning', 'FormatSuffixMismatchWarning', + 'DotSyntaxWarning'] + + +class RequiredArgumentError(TypeError): + """:exc:`TypeError` raised if a required argument is missing.""" + + +class FileExistsError(FileExistsError): + """:exc:`FileExistsError` raised with ``raise_if_exists=True``.""" + + +class UnknownSuffixWarning(RuntimeWarning): + """:exc:`RuntimeWarning` raised if the suffix of ``outfile`` is unknown + and the given ``format`` is used instead.""" + + +class FormatSuffixMismatchWarning(UserWarning): + """:exc:`UserWarning` raised if the suffix ``outfile`` + does not match the given ``format``.""" + + +class DotSyntaxWarning(RuntimeWarning): + """:exc:`RuntimeWarning` raised if a quoted string + is expected to cause a ``CalledProcessError`` from rendering.""" diff --git a/.venv/Lib/site-packages/graphviz/graphs.py b/.venv/Lib/site-packages/graphviz/graphs.py new file mode 100644 index 00000000..9018e57c --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/graphs.py @@ -0,0 +1,123 @@ 
+r"""Assemble DOT source code objects. + +Example: + >>> doctest_mark_exe() + + >>> import graphviz + >>> dot = graphviz.Graph(comment='Mønti Pythøn ik den Hølie Grailen') + + >>> dot.node('Møøse') + >>> dot.node('trained_by', 'trained by') + >>> dot.node('tutte', 'TUTTE HERMSGERVORDENBROTBORDA') + + >>> dot.edge('Møøse', 'trained_by') + >>> dot.edge('trained_by', 'tutte') + + >>> dot.node_attr['shape'] = 'rectangle' + + >>> print(dot.source) #doctest: +NORMALIZE_WHITESPACE + // Mønti Pythøn ik den Hølie Grailen + graph { + node [shape=rectangle] + "Møøse" + trained_by [label="trained by"] + tutte [label="TUTTE HERMSGERVORDENBROTBORDA"] + "Møøse" -- trained_by + trained_by -- tutte + } + + >>> dot.render('doctest-output/m00se.gv').replace('\\', '/') + 'doctest-output/m00se.gv.pdf' +""" + +import typing + +from .encoding import DEFAULT_ENCODING +from . import _tools +from . import dot +from . import jupyter_integration +from . import piping +from . import rendering +from . import unflattening + +__all__ = ['Graph', 'Digraph'] + + +class BaseGraph(dot.Dot, + rendering.Render, + jupyter_integration.JupyterIntegration, piping.Pipe, + unflattening.Unflatten): + """Dot language creation and source code rendering.""" + + @_tools.deprecate_positional_args(supported_number=2) + def __init__(self, name: typing.Optional[str] = None, + comment: typing.Optional[str] = None, + filename=None, directory=None, + format: typing.Optional[str] = None, + engine: typing.Optional[str] = None, + encoding: typing.Optional[str] = DEFAULT_ENCODING, + graph_attr=None, node_attr=None, edge_attr=None, + body=None, + strict: bool = False, *, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None) -> None: + if filename is None and name is not None: + filename = f'{name}.{self._default_extension}' + + super().__init__(name=name, comment=comment, + graph_attr=graph_attr, + node_attr=node_attr, edge_attr=edge_attr, + body=body, strict=strict, + filename=filename, directory=directory, + encoding=encoding, + format=format, engine=engine, + renderer=renderer, formatter=formatter) + + @property + def source(self) -> str: + """The generated DOT source code as string.""" + return ''.join(self) + + +class Graph(dot.GraphSyntax, BaseGraph): + """Graph source code in the DOT language. + + Args: + name: Graph name used in the source code. + comment: Comment added to the first line of the source. + filename: Filename for saving the source + (defaults to ``name`` + ``'.gv'``). + directory: (Sub)directory for source saving and rendering. + format: Rendering output format (``'pdf'``, ``'png'``, ...). + engine: Layout command used (``'dot'``, ``'neato'``, ...). + renderer: Output renderer used (``'cairo'``, ``'gd'``, ...). + formatter: Output formatter used (``'cairo'``, ``'gd'``, ...). + encoding: Encoding for saving the source. + graph_attr: Mapping of ``(attribute, value)`` pairs for the graph. + node_attr: Mapping of ``(attribute, value)`` pairs set for all nodes. + edge_attr: Mapping of ``(attribute, value)`` pairs set for all edges. + body: Iterable of verbatim lines (including their final newline) + to add to the graph ``body``. + strict (bool): Rendering should merge multi-edges. + + Note: + All parameters are `optional` and can be changed under their + corresponding attribute name after instance creation. 
+    """
+
+    @property
+    def directed(self) -> bool:
+        """``False``"""
+        return False
+
+
+class Digraph(dot.DigraphSyntax, BaseGraph):
+    """Directed graph source code in the DOT language."""
+
+    if Graph.__doc__ is not None:
+        __doc__ += Graph.__doc__.partition('.')[2]
+
+    @property
+    def directed(self) -> bool:
+        """``True``"""
+        return True
diff --git a/.venv/Lib/site-packages/graphviz/jupyter_integration.py b/.venv/Lib/site-packages/graphviz/jupyter_integration.py
new file mode 100644
index 00000000..9a7d1361
--- /dev/null
+++ b/.venv/Lib/site-packages/graphviz/jupyter_integration.py
@@ -0,0 +1,112 @@
+"""Display rendered graph as SVG in Jupyter Notebooks and QtConsole."""
+
+import typing
+
+from . import piping
+
+__all__ = ['JUPYTER_FORMATS',
+           'SUPPORTED_JUPYTER_FORMATS', 'DEFAULT_JUPYTER_FORMAT',
+           'get_jupyter_format_mimetype',
+           'JupyterIntegration']
+
+_IMAGE_JPEG = 'image/jpeg'
+
+JUPYTER_FORMATS = {'jpeg': _IMAGE_JPEG,
+                   'jpg': _IMAGE_JPEG,
+                   'png': 'image/png',
+                   'svg': 'image/svg+xml'}
+
+SUPPORTED_JUPYTER_FORMATS = set(JUPYTER_FORMATS)
+
+DEFAULT_JUPYTER_FORMAT = next(_ for _ in SUPPORTED_JUPYTER_FORMATS if _ == 'svg')
+
+MIME_TYPES = {'image/jpeg': '_repr_image_jpeg',
+              'image/png': '_repr_image_png',
+              'image/svg+xml': '_repr_image_svg_xml'}
+
+assert MIME_TYPES.keys() == set(JUPYTER_FORMATS.values())
+
+SVG_ENCODING = 'utf-8'
+
+
+def get_jupyter_format_mimetype(jupyter_format: str) -> str:
+    try:
+        return JUPYTER_FORMATS[jupyter_format]
+    except KeyError:
+        raise ValueError(f'unknown jupyter_format: {jupyter_format!r}'
+                         f' (must be one of {sorted(JUPYTER_FORMATS)})')
+
+
+def get_jupyter_mimetype_format(mimetype: str) -> str:
+    if mimetype not in MIME_TYPES:
+        raise ValueError(f'unsupported mimetype: {mimetype!r}'
+                         f' (must be one of {sorted(MIME_TYPES)})')
+
+    assert mimetype in JUPYTER_FORMATS.values()
+
+    for format, jupyter_mimetype in JUPYTER_FORMATS.items():
+        if jupyter_mimetype == mimetype:
+            return format
+
+    raise RuntimeError  # pragma: no cover
+
+
+class JupyterIntegration(piping.Pipe):
+    """Display rendered graph as SVG in Jupyter Notebooks and QtConsole."""
+
+    _jupyter_mimetype = get_jupyter_format_mimetype(DEFAULT_JUPYTER_FORMAT)
+
+    def _repr_mimebundle_(self,
+                          include: typing.Optional[typing.Iterable[str]] = None,
+                          exclude: typing.Optional[typing.Iterable[str]] = None,
+                          **_) -> typing.Dict[str, typing.Union[bytes, str]]:
+        r"""Return the rendered graph as IPython mimebundle.
+
+        Args:
+            include: Iterable of mimetypes to include in the result.
+                If not given or ``None``: ``['image/svg+xml']``.
+            exclude: Iterable of mimetypes to exclude from the result.
+                Overrides ``include``.
+
+        Returns:
+            Mapping from mimetypes to data.
+
+        Example:
+            >>> doctest_mark_exe()
+            >>> import graphviz
+            >>> dot = graphviz.Graph()
+            >>> dot._repr_mimebundle_()  # doctest: +ELLIPSIS
+            {'image/svg+xml': '<?xml version=...
+            >>> dot._repr_mimebundle_(include=['image/png'])  # doctest: +ELLIPSIS
+            {'image/png': b'\x89PNG...
+            >>> dot._repr_mimebundle_(include=[])
+            {}
+            >>> dot._repr_mimebundle_(include=['image/svg+xml', 'image/jpeg'],
+            ...                       exclude=['image/svg+xml'])  # doctest: +ELLIPSIS
+            {'image/jpeg': b'\xff...
+ >>> list(dot._repr_mimebundle_(include=['image/png', 'image/jpeg'])) + ['image/jpeg', 'image/png'] + + See also: + IPython documentation: + - https://ipython.readthedocs.io/en/stable/api/generated/IPython.display.html#functions + - https://ipython.readthedocs.io/en/stable/config/integrating.html#MyObject._repr_mimebundle_ # noqa: E501 + - https://nbviewer.org/github/ipython/ipython/blob/master/examples/IPython%20Kernel/Custom%20Display%20Logic.ipynb#Custom-Mimetypes-with-_repr_mimebundle_ # noqa: E501 + """ + include = set(include) if include is not None else {self._jupyter_mimetype} + include -= set(exclude or []) + return {mimetype: getattr(self, method_name)() + for mimetype, method_name in MIME_TYPES.items() + if mimetype in include} + + def _repr_image_jpeg(self) -> bytes: + """Return the rendered graph as JPEG bytes.""" + return self.pipe(format='jpeg') + + def _repr_image_png(self) -> bytes: + """Return the rendered graph as PNG bytes.""" + return self.pipe(format='png') + + def _repr_image_svg_xml(self) -> str: + """Return the rendered graph as SVG string.""" + return self.pipe(format='svg', encoding=SVG_ENCODING) diff --git a/.venv/Lib/site-packages/graphviz/parameters/__init__.py b/.venv/Lib/site-packages/graphviz/parameters/__init__.py new file mode 100644 index 00000000..93e30ab3 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/parameters/__init__.py @@ -0,0 +1,13 @@ +"""Hold and verify parameters for running Graphviz ``dot``.""" + +from .engines import ENGINES, verify_engine +from .formats import FORMATS, verify_format +from .renderers import RENDERERS, verify_renderer +from .formatters import FORMATTERS, verify_formatter + +from . mixins import Parameters + +__all__ = ['ENGINES', 'FORMATS', 'RENDERERS', 'FORMATTERS', + 'verify_engine', 'verify_format', + 'verify_renderer', 'verify_formatter', + 'Parameters'] diff --git a/.venv/Lib/site-packages/graphviz/parameters/base.py b/.venv/Lib/site-packages/graphviz/parameters/base.py new file mode 100644 index 00000000..75278d1a --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/parameters/base.py @@ -0,0 +1,16 @@ +"""Rendering parameter handling.""" + +from .. import copying + +__all__ = ['ParameterBase'] + + +class ParameterBase(copying.CopyBase): + """Rendering parameter.""" + + def _getattr_from_dict(self, attrname: str, *, default=None): + """Return self.attrname if attrname is in the instance dictionary + (as oposed to on the type).""" + if attrname in self.__dict__: + return getattr(self, attrname) + return default diff --git a/.venv/Lib/site-packages/graphviz/parameters/engines.py b/.venv/Lib/site-packages/graphviz/parameters/engines.py new file mode 100644 index 00000000..8280c713 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/parameters/engines.py @@ -0,0 +1,62 @@ +"""Rendering engine parameter handling.""" + +import typing + +from . 
import base + +__all__ = ['ENGINES', 'verify_engine', 'Engine'] + +ENGINES = {'dot', # https://www.graphviz.org/pdf/dot.1.pdf + 'neato', + 'twopi', + 'circo', + 'fdp', + 'sfdp', + 'patchwork', + 'osage'} + +DEFAULT_ENGINE = 'dot' + +REQUIRED = True + + +def verify_engine(engine: str, *, required: bool = REQUIRED) -> None: + if engine is None: + if required: + raise ValueError('missing engine') + elif engine.lower() not in ENGINES: + raise ValueError(f'unknown engine: {engine!r}' + f' (must be one of {sorted(ENGINES)})') + + +class Engine(base.ParameterBase): + """Rendering engine parameter with ``'dot''`` default.""" + + _engine = DEFAULT_ENGINE + + _verify_engine = staticmethod(verify_engine) + + def __init__(self, *, engine: typing.Optional[str] = None, **kwargs) -> None: + super().__init__(**kwargs) + + if engine is not None: + self.engine = engine + + def _copy_kwargs(self, **kwargs): + """Return the kwargs to create a copy of the instance.""" + engine = self._getattr_from_dict('_engine') + if engine is not None: + kwargs['engine'] = engine + return super()._copy_kwargs(**kwargs) + + @property + def engine(self) -> str: + """The layout engine used for rendering + (``'dot'``, ``'neato'``, ...).""" + return self._engine + + @engine.setter + def engine(self, engine: str) -> None: + engine = engine.lower() + self._verify_engine(engine) + self._engine = engine diff --git a/.venv/Lib/site-packages/graphviz/parameters/formats.py b/.venv/Lib/site-packages/graphviz/parameters/formats.py new file mode 100644 index 00000000..46ef1c67 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/parameters/formats.py @@ -0,0 +1,90 @@ +"""Rendering format parameter handling.""" + +import typing + +from . import base + +__all__ = ['FORMATS', 'verify_format', 'Format'] + +FORMATS = {'bmp', # https://graphviz.org/docs/outputs/ + 'canon', 'dot', 'gv', 'xdot', 'xdot1.2', 'xdot1.4', + 'cgimage', + 'cmap', + 'eps', + 'exr', + 'fig', + 'gd', 'gd2', + 'gif', + 'gtk', + 'ico', + 'imap', 'cmapx', + 'imap_np', 'cmapx_np', + 'ismap', + 'jp2', + 'jpg', 'jpeg', 'jpe', + 'json', 'json0', 'dot_json', 'xdot_json', # Graphviz 2.40 + 'pct', 'pict', + 'pdf', + 'pic', + 'plain', 'plain-ext', + 'png', + 'pov', + 'ps', + 'ps2', + 'psd', + 'sgi', + 'svg', 'svgz', + 'tga', + 'tif', 'tiff', + 'tk', + 'vml', 'vmlz', + 'vrml', + 'wbmp', + 'webp', + 'xlib', 'x11'} + +DEFAULT_FORMAT = 'pdf' + +REQUIRED = True + + +def verify_format(format: str, *, required: bool = REQUIRED) -> None: + if format is None: + if required: + raise ValueError('missing format') + elif format.lower() not in FORMATS: + raise ValueError(f'unknown format: {format!r}' + f' (must be one of {sorted(FORMATS)})') + + +class Format(base.ParameterBase): + """Rendering format parameter with ``'pdf'`` default.""" + + _format = DEFAULT_FORMAT + + _verify_format = staticmethod(verify_format) + + def __init__(self, *, format: typing.Optional[str] = None, **kwargs) -> None: + super().__init__(**kwargs) + + if format is not None: + self.format = format + + def _copy_kwargs(self, **kwargs): + """Return the kwargs to create a copy of the instance.""" + format = self._getattr_from_dict('_format') + if format is not None: + kwargs['format'] = format + return super()._copy_kwargs(**kwargs) + + @property + def format(self) -> str: + """The output format used for rendering + (``'pdf'``, ``'png'``, ...).""" + return self._format + + @format.setter + def format(self, format: str) -> None: + format = format.lower() + self._verify_format(format) + self._format = format diff --git 
a/.venv/Lib/site-packages/graphviz/parameters/formatters.py b/.venv/Lib/site-packages/graphviz/parameters/formatters.py new file mode 100644 index 00000000..ba9c16b2 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/parameters/formatters.py @@ -0,0 +1,61 @@ +"""Rendering formatter parameter handling.""" + +import typing + +from . import base + +__all__ = ['FORMATTERS', 'verify_formatter', 'Formatter'] + +FORMATTERS = {'cairo', + 'core', + 'gd', + 'gdiplus', + 'gdwbmp', + 'xlib'} + +REQUIRED = False + + +def verify_formatter(formatter: typing.Optional[str], *, + required: bool = REQUIRED) -> None: + if formatter is None: + if required: + raise ValueError('missing formatter') + elif formatter.lower() not in FORMATTERS: + raise ValueError(f'unknown formatter: {formatter!r}' + f' (must be None or one of {sorted(FORMATTERS)})') + + +class Formatter(base.ParameterBase): + """Rendering engine parameter (no default).""" + + _formatter = None + + _verify_formatter = staticmethod(verify_formatter) + + def __init__(self, *, formatter: typing.Optional[str] = None, **kwargs) -> None: + super().__init__(**kwargs) + + self.formatter = formatter + + def _copy_kwargs(self, **kwargs): + """Return the kwargs to create a copy of the instance.""" + formatter = self._getattr_from_dict('_formatter') + if formatter is not None: + kwargs['formatter'] = formatter + return super()._copy_kwargs(**kwargs) + + @property + def formatter(self) -> typing.Optional[str]: + """The output formatter used for rendering + (``'cairo'``, ``'gd'``, ...).""" + return self._formatter + + @formatter.setter + def formatter(self, formatter: typing.Optional[str]) -> None: + if formatter is None: + self.__dict__.pop('_formatter', None) + else: + formatter = formatter.lower() + self._verify_formatter(formatter) + self._formatter = formatter diff --git a/.venv/Lib/site-packages/graphviz/parameters/mixins.py b/.venv/Lib/site-packages/graphviz/parameters/mixins.py new file mode 100644 index 00000000..dbdec2f4 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/parameters/mixins.py @@ -0,0 +1,46 @@ +"""Mixin classes used to inherit parameter functionality.""" + +import typing + +from . import engines +from . import formats +from . import renderers +from . import formatters + +__all__ = ['Parameters'] + + +class Parameters(engines.Engine, formats.Format, + renderers.Renderer, formatters.Formatter): + """Parameters for calling ``graphviz.render()`` and ``graphviz.pipe()``.""" + + def _get_parameters(self, *, + engine: typing.Optional[str] = None, + format: typing.Optional[str] = None, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None, + verify: bool = False, + **kwargs): + if engine is None: + engine = self.engine + elif verify: + self._verify_engine(engine) + + if format is None: + format = self.format + elif verify: + self._verify_format(format) + + if renderer is None: + renderer = self.renderer + elif verify: + self._verify_renderer(renderer) + + if formatter is None: + formatter = self.formatter + elif verify: + self._verify_formatter(formatter) + + kwargs.update(engine=engine, format=format, + renderer=renderer, formatter=formatter) + return kwargs diff --git a/.venv/Lib/site-packages/graphviz/parameters/renderers.py b/.venv/Lib/site-packages/graphviz/parameters/renderers.py new file mode 100644 index 00000000..f20889d0 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/parameters/renderers.py @@ -0,0 +1,70 @@ +"""Rendering renderer parameter handling.""" + +import typing + +from . 
import base + +__all__ = ['RENDERERS', 'verify_renderer', 'Renderer'] + +RENDERERS = {'cairo', # $ dot -T: + 'dot', + 'fig', + 'gd', + 'gdiplus', + 'map', + 'pic', + 'pov', + 'ps', + 'svg', + 'tk', + 'vml', + 'vrml', + 'xdot'} + + +REQUIRED = False + + +def verify_renderer(renderer: typing.Optional[str], *, + required: bool = REQUIRED) -> None: + if renderer is None: + if required: + raise ValueError('missing renderer') + elif renderer.lower() not in RENDERERS: + raise ValueError(f'unknown renderer: {renderer!r}' + f' (must be None or one of {sorted(RENDERERS)})') + + +class Renderer(base.ParameterBase): + """Rendering renderer parameter (no default).""" + + _renderer = None + + _verify_renderer = staticmethod(verify_renderer) + + def __init__(self, *, renderer: typing.Optional[str] = None, **kwargs) -> None: + super().__init__(**kwargs) + + self.renderer = renderer + + def _copy_kwargs(self, **kwargs): + """Return the kwargs to create a copy of the instance.""" + renderer = self._getattr_from_dict('_renderer') + if renderer is not None: + kwargs['renderer'] = renderer + return super()._copy_kwargs(**kwargs) + + @property + def renderer(self) -> typing.Optional[str]: + """The output renderer used for rendering + (``'cairo'``, ``'gd'``, ...).""" + return self._renderer + + @renderer.setter + def renderer(self, renderer: typing.Optional[str]) -> None: + if renderer is None: + self.__dict__.pop('_renderer', None) + else: + renderer = renderer.lower() + self._verify_renderer(renderer) + self._renderer = renderer diff --git a/.venv/Lib/site-packages/graphviz/piping.py b/.venv/Lib/site-packages/graphviz/piping.py new file mode 100644 index 00000000..71767c23 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/piping.py @@ -0,0 +1,161 @@ +"""Pipe DOT code objects through Graphviz ``dot``.""" + +import codecs +import logging +import typing + +from . import _tools +from . import backend +from . import exceptions +from . import base +from . import encoding + +__all__ = ['Pipe'] + + +log = logging.getLogger(__name__) + + +class Pipe(encoding.Encoding, base.Base, backend.Pipe): + """Pipe source lines through the Graphviz layout command.""" + + @typing.overload + def pipe(self, + format: typing.Optional[str] = ..., + renderer: typing.Optional[str] = ..., + formatter: typing.Optional[str] = ..., + neato_no_op: typing.Union[bool, int, None] = ..., + quiet: bool = ..., *, + engine: typing.Optional[str] = ..., + encoding: None = ...) 
-> bytes: + """Return bytes with default ``encoding=None``.""" + + @typing.overload + def pipe(self, + format: typing.Optional[str] = ..., + renderer: typing.Optional[str] = ..., + formatter: typing.Optional[str] = ..., + neato_no_op: typing.Union[bool, int, None] = ..., + quiet: bool = ..., *, + engine: typing.Optional[str] = ..., + encoding: str) -> str: + """Return string when given encoding.""" + + @typing.overload + def pipe(self, + format: typing.Optional[str] = ..., + renderer: typing.Optional[str] = ..., + formatter: typing.Optional[str] = ..., + neato_no_op: typing.Union[bool, int, None] = ..., + quiet: bool = ..., *, + engine: typing.Optional[str] = ..., + encoding: typing.Optional[str]) -> typing.Union[bytes, str]: + """Return bytes or string depending on encoding argument.""" + + def pipe(self, + format: typing.Optional[str] = None, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None, + neato_no_op: typing.Union[bool, int, None] = None, + quiet: bool = False, *, + engine: typing.Optional[str] = None, + encoding: typing.Optional[str] = None) -> typing.Union[bytes, str]: + """Return the source piped through the Graphviz layout command. + + Args: + format: The output format used for rendering + (``'pdf'``, ``'png'``, etc.). + renderer: The output renderer used for rendering + (``'cairo'``, ``'gd'``, ...). + formatter: The output formatter used for rendering + (``'cairo'``, ``'gd'``, ...). + neato_no_op: Neato layout engine no-op flag. + quiet (bool): Suppress ``stderr`` output + from the layout subprocess. + engine: Layout engine for rendering + (``'dot'``, ``'neato'``, ...). + encoding: Encoding for decoding the stdout. + + Returns: + Bytes or if encoding is given decoded string + (stdout of the layout command). + + Raises: + ValueError: If ``engine``, ``format``, ``renderer``, or ``formatter`` + are unknown. + graphviz.RequiredArgumentError: If ``formatter`` is given + but ``renderer`` is None. + graphviz.ExecutableNotFound: If the Graphviz ``dot`` executable + is not found. + graphviz.CalledProcessError: If the returncode (exit status) + of the rendering ``dot`` subprocess is non-zero. 
+ + Example: + >>> doctest_mark_exe() + >>> import graphviz + >>> source = 'graph { spam }' + >>> graphviz.Source(source, format='svg').pipe()[:14] + b'>> graphviz.Source(source, format='svg').pipe(encoding='ascii')[:14] + '>> graphviz.Source(source, format='svg').pipe(encoding='utf-8')[:14] + ' typing.Union[bytes, str]: + return self._pipe_future(format, + renderer=renderer, + formatter=formatter, + neato_no_op=neato_no_op, + quiet=quiet, + engine=engine, + encoding=encoding) + + def _pipe_future(self, format: typing.Optional[str] = None, *, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None, + neato_no_op: typing.Union[bool, int, None] = None, + quiet: bool = False, + engine: typing.Optional[str] = None, + encoding: typing.Optional[str] = None) -> typing.Union[bytes, str]: + args, kwargs = self._get_pipe_parameters(engine=engine, + format=format, + renderer=renderer, + formatter=formatter, + neato_no_op=neato_no_op, + quiet=quiet, + verify=True) + + args.append(iter(self)) + + if encoding is not None: + if codecs.lookup(encoding) is codecs.lookup(self.encoding): + # common case: both stdin and stdout need the same encoding + return self._pipe_lines_string(*args, encoding=encoding, **kwargs) + try: + raw = self._pipe_lines(*args, input_encoding=self.encoding, **kwargs) + except exceptions.CalledProcessError as e: + *args, output, stderr = e.args + if output is not None: + output = output.decode(self.encoding) + if stderr is not None: + stderr = stderr.decode(self.encoding) + raise e.__class__(*args, output=output, stderr=stderr) + else: + return raw.decode(encoding) + return self._pipe_lines(*args, input_encoding=self.encoding, **kwargs) diff --git a/.venv/Lib/site-packages/graphviz/quoting.py b/.venv/Lib/site-packages/graphviz/quoting.py new file mode 100644 index 00000000..e5297f8c --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/quoting.py @@ -0,0 +1,221 @@ +"""Quote strings to be valid DOT identifiers, assemble quoted attribute lists.""" + +import functools +import re +import typing +import warnings + +from . import _tools +from . import exceptions + +__all__ = ['quote', 'quote_edge', + 'a_list', 'attr_list', + 'escape', 'nohtml'] + +# https://www.graphviz.org/doc/info/lang.html +# https://www.graphviz.org/doc/info/attrs.html#k:escString + +HTML_STRING = re.compile(r'<.*>$', re.DOTALL) + +ID = re.compile(r'([a-zA-Z_][a-zA-Z0-9_]*|-?(\.[0-9]+|[0-9]+(\.[0-9]*)?))$') + +KEYWORDS = {'node', 'edge', 'graph', 'digraph', 'subgraph', 'strict'} + +COMPASS = {'n', 'ne', 'e', 'se', 's', 'sw', 'w', 'nw', 'c', '_'} # TODO + +FINAL_ODD_BACKSLASHES = re.compile(r'(?(?:\\{2})*) + \\? # treat \" same as " + (?P") + ''', flags=re.VERBOSE) + +ESCAPE_UNESCAPED_QUOTES = functools.partial(QUOTE_WITH_OPTIONAL_BACKSLASHES.sub, + r'\g' + r'\\' + r'\g') + + +@_tools.deprecate_positional_args(supported_number=1) +def quote(identifier: str, + is_html_string=HTML_STRING.match, + is_valid_id=ID.match, + dot_keywords=KEYWORDS, + endswith_odd_number_of_backslashes=FINAL_ODD_BACKSLASHES.search, + escape_unescaped_quotes=ESCAPE_UNESCAPED_QUOTES) -> str: + r"""Return DOT identifier from string, quote if needed. 
+ + >>> quote('') # doctest: +NO_EXE + '""' + + >>> quote('spam') + 'spam' + + >>> quote('spam spam') + '"spam spam"' + + >>> quote('-4.2') + '-4.2' + + >>> quote('.42') + '.42' + + >>> quote('<spam>') + '<spam>' + + >>> quote(nohtml('<>')) + '"<>"' + + >>> print(quote('"')) + "\"" + + >>> print(quote('\\"')) + "\"" + + >>> print(quote('\\\\"')) + "\\\"" + + >>> print(quote('\\\\\\"')) + "\\\"" + """ + if is_html_string(identifier) and not isinstance(identifier, NoHtml): + pass + elif not is_valid_id(identifier) or identifier.lower() in dot_keywords: + if endswith_odd_number_of_backslashes(identifier): + warnings.warn('expect syntax error scanning invalid quoted string:' + f' {identifier!r}', + category=exceptions.DotSyntaxWarning) + return f'"{escape_unescaped_quotes(identifier)}"' + return identifier + + +def quote_edge(identifier: str) -> str: + """Return DOT edge statement node_id from string, quote if needed. + + >>> quote_edge('spam') # doctest: +NO_EXE + 'spam' + + >>> quote_edge('spam spam:eggs eggs') + '"spam spam":"eggs eggs"' + + >>> quote_edge('spam:eggs:s') + 'spam:eggs:s' + """ + node, _, rest = identifier.partition(':') + parts = [quote(node)] + if rest: + port, _, compass = rest.partition(':') + parts.append(quote(port)) + if compass: + parts.append(compass) + return ':'.join(parts) + + +@_tools.deprecate_positional_args(supported_number=1) +def a_list(label: typing.Optional[str] = None, + kwargs=None, attributes=None) -> str: + """Return assembled DOT a_list string. + + >>> a_list('spam', kwargs={'spam': None, 'ham': 'ham ham', 'eggs': ''}) # doctest: +NO_EXE + 'label=spam eggs="" ham="ham ham"' + """ + result = [f'label={quote(label)}'] if label is not None else [] + if kwargs: + result += [f'{quote(k)}={quote(v)}' + for k, v in _tools.mapping_items(kwargs) if v is not None] + if attributes: + if hasattr(attributes, 'items'): + attributes = _tools.mapping_items(attributes) + result += [f'{quote(k)}={quote(v)}' + for k, v in attributes if v is not None] + return ' '.join(result) + + +@_tools.deprecate_positional_args(supported_number=1) +def attr_list(label: typing.Optional[str] = None, + kwargs=None, attributes=None) -> str: + """Return assembled DOT attribute list string. + + Sorts ``kwargs`` and ``attributes`` if they are plain dicts + (to avoid unpredictable order from hash randomization in Python < 3.7). + + >>> attr_list() # doctest: +NO_EXE + '' + + >>> attr_list('spam spam', kwargs={'eggs': 'eggs', 'ham': 'ham ham'}) + ' [label="spam spam" eggs=eggs ham="ham ham"]' + + >>> attr_list(kwargs={'spam': None, 'eggs': ''}) + ' [eggs=""]' + """ + content = a_list(label, kwargs=kwargs, attributes=attributes) + if not content: + return '' + return f' [{content}]' + + +class Quote: + """Quote strings to be valid DOT identifiers, assemble quoted attribute lists.""" + + _quote = staticmethod(quote) + _quote_edge = staticmethod(quote_edge) + + _a_list = staticmethod(a_list) + _attr_list = staticmethod(attr_list) + + +def escape(s: str) -> str: + r"""Return string disabling special meaning of backslashes and ``'<...>'``. + + Args: + s: String in which backslashes and ``'<...>'`` + should be treated as literal. + + Returns: + Escaped string subclass instance. + + Raises: + TypeError: If ``s`` is not a ``str``. 
+ + Example: + >>> import graphviz # doctest: +NO_EXE + >>> print(graphviz.escape(r'\l')) + \\l + + See also: + Upstream documentation: + https://www.graphviz.org/doc/info/attrs.html#k:escString + """ + return nohtml(s.replace('\\', '\\\\')) + + +class NoHtml(str): + """String subclass that does not treat ``'<...>'`` as DOT HTML string.""" + + __slots__ = () + + +def nohtml(s: str) -> str: + """Return string not treating ``'<...>'`` as DOT HTML string in quoting. + + Args: + s: String in which leading ``'<'`` and trailing ``'>'`` + should be treated as literal. + + Returns: + String subclass instance. + + Raises: + TypeError: If ``s`` is not a ``str``. + + Example: + >>> import graphviz # doctest: +NO_EXE + >>> g = graphviz.Graph() + >>> g.node(graphviz.nohtml('<>-*-<>')) + >>> print(g.source) # doctest: +NORMALIZE_WHITESPACE + graph { + "<>-*-<>" + } + """ + return NoHtml(s) diff --git a/.venv/Lib/site-packages/graphviz/rendering.py b/.venv/Lib/site-packages/graphviz/rendering.py new file mode 100644 index 00000000..191ce330 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/rendering.py @@ -0,0 +1,186 @@ +"""Save DOT code objects, render with Graphviz ``dot``, and open in viewer.""" + +import logging +import os +import pathlib +import typing + +from . import _tools +from . import backend +from . import saving + +__all__ = ['Render'] + + +log = logging.getLogger(__name__) + + +class Render(saving.Save, backend.Render, backend.View): + """Write source lines to file and render with Graphviz.""" + + @_tools.deprecate_positional_args(supported_number=2) + def render(self, + filename: typing.Union[os.PathLike, str, None] = None, + directory: typing.Union[os.PathLike, str, None] = None, + view: bool = False, + cleanup: bool = False, + format: typing.Optional[str] = None, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None, + neato_no_op: typing.Union[bool, int, None] = None, + quiet: bool = False, + quiet_view: bool = False, *, + outfile: typing.Union[os.PathLike, str, None] = None, + engine: typing.Optional[str] = None, + raise_if_result_exists: bool = False, + overwrite_source: bool = False) -> str: + r"""Save the source to file and render with the Graphviz engine. + + Args: + filename: Filename for saving the source + (defaults to ``name`` + ``'.gv'``).s + directory: (Sub)directory for source saving and rendering. + view (bool): Open the rendered result + with the default application. + cleanup (bool): Delete the source file + after successful rendering. + format: The output format used for rendering + (``'pdf'``, ``'png'``, etc.). + renderer: The output renderer used for rendering + (``'cairo'``, ``'gd'``, ...). + formatter: The output formatter used for rendering + (``'cairo'``, ``'gd'``, ...). + neato_no_op: Neato layout engine no-op flag. + quiet (bool): Suppress ``stderr`` output + from the layout subprocess. + quiet_view (bool): Suppress ``stderr`` output + from the viewer process + (implies ``view=True``, ineffective on Windows platform). + outfile: Path for the rendered output file. + engine: Layout engine for rendering + (``'dot'``, ``'neato'``, ...). + raise_if_result_exists: Raise :exc:`graphviz.FileExistsError` + if the result file exists. + overwrite_source: Allow ``dot`` to write to the file it reads from. + Incompatible with ``raise_if_result_exists``. + + Returns: + The (possibly relative) path of the rendered file. + + Raises: + ValueError: If ``engine``, ``format``, ``renderer``, or ``formatter`` + are unknown. 
+ graphviz.RequiredArgumentError: If ``formatter`` is given + but ``renderer`` is None. + ValueError: If ``outfile`` is the same file as the source file + unless ``overwite_source=True``. + graphviz.ExecutableNotFound: If the Graphviz ``dot`` executable + is not found. + graphviz.CalledProcessError: If the returncode (exit status) + of the rendering ``dot`` subprocess is non-zero. + RuntimeError: If viewer opening is requested but not supported. + + Example: + >>> doctest_mark_exe() + >>> import graphviz + >>> dot = graphviz.Graph(name='spam', directory='doctest-output') + >>> dot.render(format='png').replace('\\', '/') + 'doctest-output/spam.gv.png' + >>> dot.render(outfile='spam.svg').replace('\\', '/') + 'doctest-output/spam.svg' + + Note: + The layout command is started from the directory of ``filepath``, + so that references to external files + (e.g. ``[image=images/camelot.png]``) + can be given as paths relative to the DOT source file. + """ + outfile = _tools.promote_pathlike(outfile) + if outfile is not None: + format = self._get_format(outfile, format=format) + if directory is None: + outfile = pathlib.Path(self.directory, outfile) + + args, kwargs = self._get_render_parameters(engine=engine, + format=format, + renderer=renderer, + formatter=formatter, + neato_no_op=neato_no_op, + quiet=quiet, + outfile=outfile, + raise_if_result_exists=raise_if_result_exists, + overwrite_source=overwrite_source, + verify=True) + + if outfile is not None and filename is None: + filename = self._get_filepath(outfile) + + filepath = self.save(filename, directory=directory, skip_existing=None) + + args.append(filepath) + + rendered = self._render(*args, **kwargs) + + if cleanup: + log.debug('delete %r', filepath) + os.remove(filepath) + + if quiet_view or view: + self._view(rendered, format=self._format, quiet=quiet_view) + + return rendered + + def _view(self, filepath: typing.Union[os.PathLike, str], *, + format: str, quiet: bool) -> None: + """Start the right viewer based on file format and platform.""" + methodnames = [ + f'_view_{format}_{backend.viewing.PLATFORM}', + f'_view_{backend.viewing.PLATFORM}', + ] + for name in methodnames: + view_method = getattr(self, name, None) + if view_method is not None: + break + else: + raise RuntimeError(f'{self.__class__!r} has no built-in viewer' + f' support for {format!r}' + f' on {backend.viewing.PLATFORM!r} platform') + view_method(filepath, quiet=quiet) + + @_tools.deprecate_positional_args(supported_number=2) + def view(self, + filename: typing.Union[os.PathLike, str, None] = None, + directory: typing.Union[os.PathLike, str, None] = None, + cleanup: bool = False, + quiet: bool = False, + quiet_view: bool = False) -> str: + """Save the source to file, open the rendered result in a viewer. + + Convenience short-cut for running ``.render(view=True)``. + + Args: + filename: Filename for saving the source + (defaults to ``name`` + ``'.gv'``). + directory: (Sub)directory for source saving and rendering. + cleanup (bool): Delete the source file after successful rendering. + quiet (bool): Suppress ``stderr`` output from the layout subprocess. + quiet_view (bool): Suppress ``stderr`` output + from the viewer process (ineffective on Windows). + + Returns: + The (possibly relative) path of the rendered file. + + Raises: + graphviz.ExecutableNotFound: If the Graphviz executable + is not found. + graphviz.CalledProcessError: If the exit status is non-zero. + RuntimeError: If opening the viewer is not supported. 
+ + Short-cut method for calling :meth:`.render` with ``view=True``. + + Note: + There is no option to wait for the application to close, + and no way to retrieve the application's exit status. + """ + return self.render(filename=filename, directory=directory, view=True, + cleanup=cleanup, quiet=quiet, quiet_view=quiet_view) diff --git a/.venv/Lib/site-packages/graphviz/saving.py b/.venv/Lib/site-packages/graphviz/saving.py new file mode 100644 index 00000000..0a95e8f5 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/saving.py @@ -0,0 +1,83 @@ +"""Save DOT source lines to a file.""" + +import logging +import os +import typing + +from . import _defaults +from . import _tools +from . import base +from . import encoding + +__all__ = ['Save'] + +log = logging.getLogger(__name__) + + +class Save(encoding.Encoding, base.Base): + """Save DOT source lines to file.""" + + directory: typing.Union[str, bytes] = '' + + _default_extension = _defaults.DEFAULT_SOURCE_EXTENSION + + _mkdirs = staticmethod(_tools.mkdirs) + + def __init__(self, *, + filename: typing.Union[os.PathLike, str], + directory: typing.Union[os.PathLike, str, None] = None, + **kwargs) -> None: + super().__init__(**kwargs) + + if filename is None: + filename = f'{self.__class__.__name__}.{self._default_extension}' + + self.filename = os.fspath(filename) + """str: Target file name for saving the DOT source file.""" + + if directory is not None: + self.directory = os.fspath(directory) + + def _copy_kwargs(self, **kwargs): + """Return the kwargs to create a copy of the instance.""" + assert 'directory' not in kwargs + if 'directory' in self.__dict__: + kwargs['directory'] = self.directory + return super()._copy_kwargs(filename=self.filename, **kwargs) + + @property + def filepath(self) -> str: + """The target path for saving the DOT source file.""" + return os.path.join(self.directory, self.filename) + + @_tools.deprecate_positional_args(supported_number=2) + def save(self, filename: typing.Union[os.PathLike, str, None] = None, + directory: typing.Union[os.PathLike, str, None] = None, *, + skip_existing: typing.Optional[bool] = False) -> str: + """Save the DOT source to file. Ensure the file ends with a newline. + + Args: + filename: Filename for saving the source (defaults to ``name`` + ``'.gv'``) + directory: (Sub)directory for source saving and rendering. + skip_existing: Skip write if file exists (default: ``False``). + + Returns: + The (possibly relative) path of the saved source file. + """ + if filename is not None: + self.filename = filename + if directory is not None: + self.directory = directory + + filepath = self.filepath + if skip_existing and os.path.exists(filepath): + return filepath + + self._mkdirs(filepath) + + log.debug('write lines to %r', filepath) + with open(filepath, 'w', encoding=self.encoding) as fd: + for uline in self: + fd.write(uline) + + return filepath diff --git a/.venv/Lib/site-packages/graphviz/sources.py b/.venv/Lib/site-packages/graphviz/sources.py new file mode 100644 index 00000000..12ec1631 --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/sources.py @@ -0,0 +1,147 @@ +"""Save DOT code objects, render with Graphviz dot, and open in viewer.""" + +import locale +import logging +import os +import typing + +from .encoding import DEFAULT_ENCODING +from . import _tools +from . import saving +from . import jupyter_integration +from . import piping +from . import rendering +from . 
import unflattening + +__all__ = ['Source'] + + +log = logging.getLogger(__name__) + + +class Source(rendering.Render, saving.Save, + jupyter_integration.JupyterIntegration, piping.Pipe, + unflattening.Unflatten): + """Verbatim DOT source code string to be rendered by Graphviz. + + Args: + source: The verbatim DOT source code string. + filename: Filename for saving the source (defaults to ``'Source.gv'``). + directory: (Sub)directory for source saving and rendering. + format: Rendering output format (``'pdf'``, ``'png'``, ...). + engine: Layout engine used (``'dot'``, ``'neato'``, ...). + encoding: Encoding for saving the source. + + Note: + All parameters except ``source`` are optional. All of them + can be changed under their corresponding attribute name + after instance creation. + """ + + @classmethod + @_tools.deprecate_positional_args(supported_number=2) + def from_file(cls, filename: typing.Union[os.PathLike, str], + directory: typing.Union[os.PathLike, str, None] = None, + format: typing.Optional[str] = None, + engine: typing.Optional[str] = None, + encoding: typing.Optional[str] = DEFAULT_ENCODING, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None) -> 'Source': + """Return an instance with the source string read from the given file. + + Args: + filename: Filename for loading/saving the source. + directory: (Sub)directory for source loading/saving and rendering. + format: Rendering output format (``'pdf'``, ``'png'``, ...). + engine: Layout command used (``'dot'``, ``'neato'``, ...). + encoding: Encoding for loading/saving the source. + """ + directory = _tools.promote_pathlike_directory(directory) + filepath = (os.path.join(directory, filename) if directory.parts + else os.fspath(filename)) + + if encoding is None: + encoding = locale.getpreferredencoding() + + log.debug('read %r with encoding %r', filepath, encoding) + with open(filepath, encoding=encoding) as fd: + source = fd.read() + + return cls(source, + filename=filename, directory=directory, + format=format, engine=engine, encoding=encoding, + renderer=renderer, formatter=formatter, + loaded_from_path=filepath) + + @_tools.deprecate_positional_args(supported_number=2) + def __init__(self, source: str, + filename: typing.Union[os.PathLike, str, None] = None, + directory: typing.Union[os.PathLike, str, None] = None, + format: typing.Optional[str] = None, + engine: typing.Optional[str] = None, + encoding: typing.Optional[str] = DEFAULT_ENCODING, *, + renderer: typing.Optional[str] = None, + formatter: typing.Optional[str] = None, + loaded_from_path: typing.Optional[os.PathLike] = None) -> None: + super().__init__(filename=filename, directory=directory, + format=format, engine=engine, + renderer=renderer, formatter=formatter, + encoding=encoding) + self._loaded_from_path = loaded_from_path + self._source = source + + # work around pytype false alarm + _source: str + _loaded_from_path: typing.Optional[os.PathLike] + + def _copy_kwargs(self, **kwargs): + """Return the kwargs to create a copy of the instance.""" + return super()._copy_kwargs(source=self._source, + loaded_from_path=self._loaded_from_path, + **kwargs) + + def __iter__(self) -> typing.Iterator[str]: + r"""Yield the DOT source code read from file line by line. + + Yields: Line ending with a newline (``'\n'``). 
+ """ + lines = self._source.splitlines(keepends=True) + yield from lines[:-1] + for line in lines[-1:]: + suffix = '\n' if not line.endswith('\n') else '' + yield line + suffix + + @property + def source(self) -> str: + """The DOT source code as string. + + Normalizes so that the string always ends in a final newline. + """ + source = self._source + if not source.endswith('\n'): + source += '\n' + return source + + @_tools.deprecate_positional_args(supported_number=2) + def save(self, filename: typing.Union[os.PathLike, str, None] = None, + directory: typing.Union[os.PathLike, str, None] = None, *, + skip_existing: typing.Optional[bool] = None) -> str: + """Save the DOT source to file. Ensure the file ends with a newline. + + Args: + filename: Filename for saving the source (defaults to ``name`` + ``'.gv'``) + directory: (Sub)directory for source saving and rendering. + skip_existing: Skip write if file exists (default: ``None``). + By default skips if instance was loaded from the target path: + ``.from_file(self.filepath)``. + + Returns: + The (possibly relative) path of the saved source file. + """ + skip = (skip_existing is None and self._loaded_from_path + and os.path.samefile(self._loaded_from_path, self.filepath)) + if skip: + log.debug('.save(skip_existing=None) skip writing Source.from_file(%r)', + self.filepath) + return super().save(filename=filename, directory=directory, + skip_existing=skip) diff --git a/.venv/Lib/site-packages/graphviz/unflattening.py b/.venv/Lib/site-packages/graphviz/unflattening.py new file mode 100644 index 00000000..c87cc7db --- /dev/null +++ b/.venv/Lib/site-packages/graphviz/unflattening.py @@ -0,0 +1,63 @@ +"""Pipe source through the Graphviz *unflatten* preprocessor.""" + +import typing + +import graphviz +from . import _tools +from . import base +from . import backend +from . import encoding + +__all__ = ['Unflatten'] + + +class Unflatten(encoding.Encoding, base.Base, backend.Unflatten): + """Pipe source through the Graphviz *unflatten* preprocessor.""" + + @_tools.deprecate_positional_args(supported_number=1) + def unflatten(self, + stagger: typing.Optional[int] = None, + fanout: bool = False, + chain: typing.Optional[int] = None) -> 'graphviz.Source': + """Return a new :class:`.Source` instance with the source + piped through the Graphviz *unflatten* preprocessor. + + Args: + stagger: Stagger the minimum length + of leaf edges between 1 and this small integer. + fanout: Fanout nodes with indegree = outdegree = 1 + when staggering (requires ``stagger``). + chain: Form disconnected nodes into chains + of up to this many nodes. + + Returns: + Prepocessed DOT source code (improved layout aspect ratio). + + Raises: + graphviz.RequiredArgumentError: If ``fanout`` is given + but ``stagger`` is None. + graphviz.ExecutableNotFound: If the Graphviz ``unflatten`` executable + is not found. + graphviz.CalledProcessError: If the returncode (exit status) + of the unflattening 'unflatten' subprocess is non-zero. + + See also: + Upstream documentation: + https://www.graphviz.org/pdf/unflatten.1.pdf + """ + from . 
import sources + + out = self._unflatten(self.source, + stagger=stagger, fanout=fanout, chain=chain, + encoding=self.encoding) + + kwargs = self._copy_kwargs() + return sources.Source(out, + filename=kwargs.get('filename'), + directory=kwargs.get('directory'), + format=kwargs.get('format'), + engine=kwargs.get('engine'), + encoding=kwargs.get('encoding'), + renderer=kwargs.get('renderer'), + formatter=kwargs.get('formatter'), + loaded_from_path=None) diff --git a/.venv/Lib/site-packages/identify-2.6.9.dist-info/INSTALLER b/.venv/Lib/site-packages/identify-2.6.9.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/identify-2.6.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/identify-2.6.9.dist-info/LICENSE b/.venv/Lib/site-packages/identify-2.6.9.dist-info/LICENSE new file mode 100644 index 00000000..0ecc5db9 --- /dev/null +++ b/.venv/Lib/site-packages/identify-2.6.9.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2017 Chris Kuehl, Anthony Sottile + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/.venv/Lib/site-packages/identify-2.6.9.dist-info/METADATA b/.venv/Lib/site-packages/identify-2.6.9.dist-info/METADATA new file mode 100644 index 00000000..c1ba9ecc --- /dev/null +++ b/.venv/Lib/site-packages/identify-2.6.9.dist-info/METADATA @@ -0,0 +1,155 @@ +Metadata-Version: 2.1 +Name: identify +Version: 2.6.9 +Summary: File identification library for Python +Home-page: https://github.com/pre-commit/identify +Author: Chris Kuehl +Author-email: ckuehl@ocf.berkeley.edu +License: MIT +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.9 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: license +Requires-Dist: ukkonen; extra == "license" + +[![build status](https://github.com/pre-commit/identify/actions/workflows/main.yml/badge.svg)](https://github.com/pre-commit/identify/actions/workflows/main.yml) +[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/pre-commit/identify/main.svg)](https://results.pre-commit.ci/latest/github/pre-commit/identify/main) + +identify +======== + +File identification library for Python. 
+ +Given a file (or some information about a file), return a set of standardized +tags identifying what the file is. + +## Installation + +```bash +pip install identify +``` + +## Usage +### With a file on disk + +If you have an actual file on disk, you can get the most information possible +(a superset of all other methods): + +```python +>>> from identify import identify +>>> identify.tags_from_path('/path/to/file.py') +{'file', 'text', 'python', 'non-executable'} +>>> identify.tags_from_path('/path/to/file-with-shebang') +{'file', 'text', 'shell', 'bash', 'executable'} +>>> identify.tags_from_path('/bin/bash') +{'file', 'binary', 'executable'} +>>> identify.tags_from_path('/path/to/directory') +{'directory'} +>>> identify.tags_from_path('/path/to/symlink') +{'symlink'} +``` + +When using a file on disk, the checks performed are: + +* File type (file, symlink, directory, socket) +* Mode (is it executable?) +* File name (mostly based on extension) +* If executable, the shebang is read and the interpreter interpreted + + +### If you only have the filename + +```python +>>> identify.tags_from_filename('file.py') +{'text', 'python'} +``` + + +### If you only have the interpreter + +```python +>>> identify.tags_from_interpreter('python3.5') +{'python', 'python3'} +>>> identify.tags_from_interpreter('bash') +{'shell', 'bash'} +>>> identify.tags_from_interpreter('some-unrecognized-thing') +set() +``` + +### As a cli + +``` +$ identify-cli --help +usage: identify-cli [-h] [--filename-only] path + +positional arguments: + path + +optional arguments: + -h, --help show this help message and exit + --filename-only +``` + +```console +$ identify-cli setup.py; echo $? +["file", "non-executable", "python", "text"] +0 +$ identify-cli setup.py --filename-only; echo $? +["python", "text"] +0 +$ identify-cli wat.wat; echo $? +wat.wat does not exist. +1 +$ identify-cli wat.wat --filename-only; echo $? +1 +``` + +### Identifying LICENSE files + +`identify` also has an api for determining what type of license is contained +in a file. This routine is roughly based on the approaches used by +[licensee] (the ruby gem that github uses to figure out the license for a +repo). + +The approach that `identify` uses is as follows: + +1. Strip the copyright line +2. Normalize all whitespace +3. Return any exact matches +4. Return the closest by edit distance (where edit distance < 5%) + +To use the api, install via `pip install identify[license]` + +```pycon +>>> from identify import identify +>>> identify.license_id('LICENSE') +'MIT' +``` + +The return value of the `license_id` function is an [SPDX] id. Currently +licenses are sourced from [choosealicense.com]. + +[licensee]: https://github.com/benbalter/licensee +[SPDX]: https://spdx.org/licenses/ +[choosealicense.com]: https://github.com/github/choosealicense.com + +## How it works + +A call to `tags_from_path` does this: + +1. What is the type: file, symlink, directory? If it's not file, stop here. +2. Is it executable? Add the appropriate tag. +3. Do we recognize the file extension? If so, add the appropriate tags, stop + here. These tags would include binary/text. +4. Peek at the first X bytes of the file. Use these to determine whether it is + binary or text, add the appropriate tag. +5. If identified as text above, try to read and interpret the shebang, and add + appropriate tags. + +By design, this means we don't need to partially read files where we recognize +the file extension. 
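To tie the pieces of this README together, here is a minimal sketch of the three lookup entry points it describes (`tags_from_path`, `tags_from_filename`, `tags_from_interpreter`), assuming `identify` is installed as shown under Installation. The paths and names are placeholders for illustration only, not files guaranteed to exist.

```python
from identify import identify

# Full lookup: type, mode, extension/shebang tags, plus text/binary detection.
# Note: tags_from_path raises ValueError if the placeholder path does not exist.
print(identify.tags_from_path('setup.py'))

# Name-only lookup: no filesystem access, so no text/binary or executable tags.
# Dotted names fall back to their base name (e.g. "Dockerfile.xenial" -> "Dockerfile").
print(identify.tags_from_filename('Dockerfile.xenial'))

# Interpreter lookup walks "python3.12" -> "python3" -> "python" until one matches.
print(identify.tags_from_interpreter('python3.12'))
```

As the "How it works" steps note, the filename-based path is the cheap one: when the extension or name is recognized, the file contents are never read.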
diff --git a/.venv/Lib/site-packages/identify-2.6.9.dist-info/RECORD b/.venv/Lib/site-packages/identify-2.6.9.dist-info/RECORD new file mode 100644 index 00000000..60a74bf6 --- /dev/null +++ b/.venv/Lib/site-packages/identify-2.6.9.dist-info/RECORD @@ -0,0 +1,23 @@ +../../Scripts/identify-cli.exe,sha256=_5li4I-kDviA0vT3CUNALylCMXcci1iQd307Su7SWDc,108389 +identify-2.6.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +identify-2.6.9.dist-info/LICENSE,sha256=7bwq07cIT6yHPhsbRQ_TcNkU65f38xJjtP9l9H3BDEM,1072 +identify-2.6.9.dist-info/METADATA,sha256=Q4v6LfG8mH2pVSvZTs9NlEjygJiKW8KPZHpPfGkRs5M,4422 +identify-2.6.9.dist-info/RECORD,, +identify-2.6.9.dist-info/WHEEL,sha256=qUzzGenXXuJTzyjFah76kDVqDvnk-YDzY00svnrl84w,109 +identify-2.6.9.dist-info/entry_points.txt,sha256=1NoWWCLrGAFJ5SyQ063qYZwhSobmnuTHx5XRDCPwnN0,51 +identify-2.6.9.dist-info/top_level.txt,sha256=Rlt8stwsb21b0aSlWbp_2EuNfX8sdPOvAEF0-FxGGVs,9 +identify/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +identify/__pycache__/__init__.cpython-312.pyc,, +identify/__pycache__/cli.cpython-312.pyc,, +identify/__pycache__/extensions.cpython-312.pyc,, +identify/__pycache__/identify.cpython-312.pyc,, +identify/__pycache__/interpreters.cpython-312.pyc,, +identify/cli.py,sha256=XlB--Wr_VxmChg_RepRqiv5nl4e5rhgP9oPc8r3IiUQ,739 +identify/extensions.py,sha256=Irne5vVMWp9R5zhUF2_D-kYbl8OUN4paRmngcKlO_T0,13884 +identify/identify.py,sha256=ztEFpuDaTBJzyPSnFkqnmtIbZbbzF7nRS6etsgd6q5w,7916 +identify/interpreters.py,sha256=U3NPE-TObtRUB6XdvOxOmce9gtnyabgY0bXyxHjyyX8,688 +identify/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +identify/vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +identify/vendor/__pycache__/__init__.cpython-312.pyc,, +identify/vendor/__pycache__/licenses.cpython-312.pyc,, +identify/vendor/licenses.py,sha256=jsArrb7_5qdhbkeIrDScNWhs9IZ1TEw_J81m05FZQBw,335105 diff --git a/.venv/Lib/site-packages/identify-2.6.9.dist-info/WHEEL b/.venv/Lib/site-packages/identify-2.6.9.dist-info/WHEEL new file mode 100644 index 00000000..de294b9e --- /dev/null +++ b/.venv/Lib/site-packages/identify-2.6.9.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (74.1.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/.venv/Lib/site-packages/identify-2.6.9.dist-info/entry_points.txt b/.venv/Lib/site-packages/identify-2.6.9.dist-info/entry_points.txt new file mode 100644 index 00000000..66a1001a --- /dev/null +++ b/.venv/Lib/site-packages/identify-2.6.9.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +identify-cli = identify.cli:main diff --git a/.venv/Lib/site-packages/identify-2.6.9.dist-info/top_level.txt b/.venv/Lib/site-packages/identify-2.6.9.dist-info/top_level.txt new file mode 100644 index 00000000..5c5444c1 --- /dev/null +++ b/.venv/Lib/site-packages/identify-2.6.9.dist-info/top_level.txt @@ -0,0 +1 @@ +identify diff --git a/.venv/Lib/site-packages/identify/__init__.py b/.venv/Lib/site-packages/identify/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/identify/cli.py b/.venv/Lib/site-packages/identify/cli.py new file mode 100644 index 00000000..98c77bd6 --- /dev/null +++ b/.venv/Lib/site-packages/identify/cli.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +import argparse +import json +from collections.abc import Sequence + +from identify import identify + + +def main(argv: Sequence[str] | None = None) -> int: + parser = argparse.ArgumentParser() + 
parser.add_argument('--filename-only', action='store_true') + parser.add_argument('path') + args = parser.parse_args(argv) + + if args.filename_only: + func = identify.tags_from_filename + else: + func = identify.tags_from_path + + try: + tags = sorted(func(args.path)) + except ValueError as e: + print(e) + return 1 + + if not tags: + return 1 + else: + print(json.dumps(tags)) + return 0 + + +if __name__ == '__main__': + raise SystemExit(main()) diff --git a/.venv/Lib/site-packages/identify/extensions.py b/.venv/Lib/site-packages/identify/extensions.py new file mode 100644 index 00000000..c5d680a5 --- /dev/null +++ b/.venv/Lib/site-packages/identify/extensions.py @@ -0,0 +1,406 @@ +from __future__ import annotations +EXTENSIONS = { + 'adoc': {'text', 'asciidoc'}, + 'ai': {'binary', 'adobe-illustrator'}, + 'aj': {'text', 'aspectj'}, + 'asciidoc': {'text', 'asciidoc'}, + 'apinotes': {'text', 'apinotes'}, + 'asar': {'binary', 'asar'}, + 'asm': {'text', 'asm'}, + 'astro': {'text', 'astro'}, + 'avif': {'binary', 'image', 'avif'}, + 'avsc': {'text', 'avro-schema'}, + 'bash': {'text', 'shell', 'bash'}, + 'bat': {'text', 'batch'}, + 'bats': {'text', 'shell', 'bash', 'bats'}, + 'bazel': {'text', 'bazel'}, + 'bb': {'text', 'bitbake'}, + 'bbappend': {'text', 'bitbake'}, + 'bbclass': {'text', 'bitbake'}, + 'beancount': {'text', 'beancount'}, + 'bib': {'text', 'bib'}, + 'bmp': {'binary', 'image', 'bitmap'}, + 'bz2': {'binary', 'bzip2'}, + 'bz3': {'binary', 'bzip3'}, + 'bzl': {'text', 'bazel'}, + 'c': {'text', 'c'}, + 'c++': {'text', 'c++'}, + 'c++m': {'text', 'c++'}, + 'cc': {'text', 'c++'}, + 'ccm': {'text', 'c++'}, + 'cfg': {'text'}, + 'chs': {'text', 'c2hs'}, + 'cjs': {'text', 'javascript'}, + 'clj': {'text', 'clojure'}, + 'cljc': {'text', 'clojure'}, + 'cljs': {'text', 'clojure', 'clojurescript'}, + 'cmake': {'text', 'cmake'}, + 'cnf': {'text'}, + 'coffee': {'text', 'coffee'}, + 'conf': {'text'}, + 'cpp': {'text', 'c++'}, + 'cppm': {'text', 'c++'}, + 'cr': {'text', 'crystal'}, + 'crt': {'text', 'pem'}, + 'cs': {'text', 'c#'}, + 'csproj': {'text', 'xml', 'csproj'}, + 'csh': {'text', 'shell', 'csh'}, + 'cson': {'text', 'cson'}, + 'css': {'text', 'css'}, + 'csv': {'text', 'csv'}, + 'csx': {'text', 'c#', 'c#script'}, + 'cu': {'text', 'cuda'}, + 'cue': {'text', 'cue'}, + 'cuh': {'text', 'cuda'}, + 'cxx': {'text', 'c++'}, + 'cxxm': {'text', 'c++'}, + 'cylc': {'text', 'cylc'}, + 'dart': {'text', 'dart'}, + 'dbc': {'text', 'dbc'}, + 'def': {'text', 'def'}, + 'dll': {'binary'}, + 'dtd': {'text', 'dtd'}, + 'ear': {'binary', 'zip', 'jar'}, + 'edn': {'text', 'clojure', 'edn'}, + 'ejs': {'text', 'ejs'}, + 'ejson': {'text', 'json', 'ejson'}, + 'elm': {'text', 'elm'}, + 'env': {'text', 'dotenv'}, + 'eot': {'binary', 'eot'}, + 'eps': {'binary', 'eps'}, + 'erb': {'text', 'erb'}, + 'erl': {'text', 'erlang'}, + 'ex': {'text', 'elixir'}, + 'exe': {'binary'}, + 'exs': {'text', 'elixir'}, + 'eyaml': {'text', 'yaml'}, + 'f03': {'text', 'fortran'}, + 'f08': {'text', 'fortran'}, + 'f90': {'text', 'fortran'}, + 'f95': {'text', 'fortran'}, + 'feature': {'text', 'gherkin'}, + 'fish': {'text', 'fish'}, + 'fits': {'binary', 'fits'}, + 'fs': {'text', 'f#'}, + 'fsx': {'text', 'f#', 'f#script'}, + 'gd': {'text', 'gdscript'}, + 'gemspec': {'text', 'ruby'}, + 'geojson': {'text', 'geojson', 'json'}, + 'ggb': {'binary', 'zip', 'ggb'}, + 'gif': {'binary', 'image', 'gif'}, + 'gleam': {'text', 'gleam'}, + 'go': {'text', 'go'}, + 'gotmpl': {'text', 'gotmpl'}, + 'gpx': {'text', 'gpx', 'xml'}, + 'graphql': {'text', 'graphql'}, + 'gradle': 
{'text', 'groovy'}, + 'groovy': {'text', 'groovy'}, + 'gyb': {'text', 'gyb'}, + 'gyp': {'text', 'gyp', 'python'}, + 'gypi': {'text', 'gyp', 'python'}, + 'gz': {'binary', 'gzip'}, + 'h': {'text', 'header', 'c', 'c++'}, + 'hbs': {'text', 'handlebars'}, + 'hcl': {'text', 'hcl'}, + 'hh': {'text', 'header', 'c++'}, + 'hpp': {'text', 'header', 'c++'}, + 'hrl': {'text', 'erlang'}, + 'hs': {'text', 'haskell'}, + 'htm': {'text', 'html'}, + 'html': {'text', 'html'}, + 'hxx': {'text', 'header', 'c++'}, + 'icns': {'binary', 'icns'}, + 'ico': {'binary', 'icon'}, + 'ics': {'text', 'icalendar'}, + 'idl': {'text', 'idl'}, + 'idr': {'text', 'idris'}, + 'inc': {'text', 'inc'}, + 'ini': {'text', 'ini'}, + 'inl': {'text', 'inl', 'c++'}, + 'ino': {'text', 'ino', 'c++'}, + 'inx': {'text', 'xml', 'inx'}, + 'ipynb': {'text', 'jupyter', 'json'}, + 'ixx': {'text', 'c++'}, + 'j2': {'text', 'jinja'}, + 'jade': {'text', 'jade'}, + 'jar': {'binary', 'zip', 'jar'}, + 'java': {'text', 'java'}, + 'jenkins': {'text', 'groovy', 'jenkins'}, + 'jenkinsfile': {'text', 'groovy', 'jenkins'}, + 'jinja': {'text', 'jinja'}, + 'jinja2': {'text', 'jinja'}, + 'jl': {'text', 'julia'}, + 'jpeg': {'binary', 'image', 'jpeg'}, + 'jpg': {'binary', 'image', 'jpeg'}, + 'js': {'text', 'javascript'}, + 'json': {'text', 'json'}, + 'jsonld': {'text', 'json', 'jsonld'}, + 'jsonnet': {'text', 'jsonnet'}, + 'json5': {'text', 'json5'}, + 'jsx': {'text', 'jsx'}, + 'key': {'text', 'pem'}, + 'kml': {'text', 'kml', 'xml'}, + 'kt': {'text', 'kotlin'}, + 'kts': {'text', 'kotlin'}, + 'lean': {'text', 'lean'}, + 'lektorproject': {'text', 'ini', 'lektorproject'}, + 'less': {'text', 'less'}, + 'lfm': {'text', 'lazarus', 'lazarus-form'}, + 'lhs': {'text', 'literate-haskell'}, + 'libsonnet': {'text', 'jsonnet'}, + 'lidr': {'text', 'idris'}, + 'liquid': {'text', 'liquid'}, + 'lpi': {'text', 'lazarus', 'xml'}, + 'lpr': {'text', 'lazarus', 'pascal'}, + 'lr': {'text', 'lektor'}, + 'lua': {'text', 'lua'}, + 'm': {'text', 'objective-c'}, + 'm4': {'text', 'm4'}, + 'make': {'text', 'makefile'}, + 'manifest': {'text', 'manifest'}, + 'map': {'text', 'map'}, + 'markdown': {'text', 'markdown'}, + 'md': {'text', 'markdown'}, + 'mdx': {'text', 'mdx'}, + 'meson': {'text', 'meson'}, + 'metal': {'text', 'metal'}, + 'mib': {'text', 'mib'}, + 'mjs': {'text', 'javascript'}, + 'mk': {'text', 'makefile'}, + 'ml': {'text', 'ocaml'}, + 'mli': {'text', 'ocaml'}, + 'mm': {'text', 'c++', 'objective-c++'}, + 'modulemap': {'text', 'modulemap'}, + 'mscx': {'text', 'xml', 'musescore'}, + 'mscz': {'binary', 'zip', 'musescore'}, + 'mustache': {'text', 'mustache'}, + 'myst': {'text', 'myst'}, + 'ngdoc': {'text', 'ngdoc'}, + 'nim': {'text', 'nim'}, + 'nims': {'text', 'nim'}, + 'nimble': {'text', 'nimble'}, + 'nix': {'text', 'nix'}, + 'njk': {'text', 'nunjucks'}, + 'otf': {'binary', 'otf'}, + 'p12': {'binary', 'p12'}, + 'pas': {'text', 'pascal'}, + 'patch': {'text', 'diff'}, + 'pdf': {'binary', 'pdf'}, + 'pem': {'text', 'pem'}, + 'php': {'text', 'php'}, + 'php4': {'text', 'php'}, + 'php5': {'text', 'php'}, + 'phtml': {'text', 'php'}, + 'pl': {'text', 'perl'}, + 'plantuml': {'text', 'plantuml'}, + 'pm': {'text', 'perl'}, + 'png': {'binary', 'image', 'png'}, + 'po': {'text', 'pofile'}, + 'pom': {'pom', 'text', 'xml'}, + 'pp': {'text', 'puppet'}, + 'prisma': {'text', 'prisma'}, + 'properties': {'text', 'java-properties'}, + 'proto': {'text', 'proto'}, + 'ps1': {'text', 'powershell'}, + 'psd1': {'text', 'powershell'}, + 'psm1': {'text', 'powershell'}, + 'pug': {'text', 'pug'}, + 'puml': {'text', 
'plantuml'}, + 'purs': {'text', 'purescript'}, + 'pxd': {'text', 'cython'}, + 'pxi': {'text', 'cython'}, + 'py': {'text', 'python'}, + 'pyi': {'text', 'pyi'}, + 'pyproj': {'text', 'xml', 'pyproj'}, + 'pyt': {'text', 'python'}, + 'pyx': {'text', 'cython'}, + 'pyz': {'binary', 'pyz'}, + 'pyzw': {'binary', 'pyz'}, + 'qml': {'text', 'qml'}, + 'r': {'text', 'r'}, + 'rake': {'text', 'ruby'}, + 'rb': {'text', 'ruby'}, + 'resx': {'text', 'resx', 'xml'}, + 'rng': {'text', 'xml', 'relax-ng'}, + 'rs': {'text', 'rust'}, + 'rst': {'text', 'rst'}, + 's': {'text', 'asm'}, + 'sas': {'text', 'sas'}, + 'sass': {'text', 'sass'}, + 'sbt': {'text', 'sbt', 'scala'}, + 'sc': {'text', 'scala'}, + 'scala': {'text', 'scala'}, + 'scm': {'text', 'scheme'}, + 'scss': {'text', 'scss'}, + 'sh': {'text', 'shell'}, + 'sln': {'text', 'sln'}, + 'sls': {'text', 'salt'}, + 'so': {'binary'}, + 'sol': {'text', 'solidity'}, + 'spec': {'text', 'spec'}, + 'sql': {'text', 'sql'}, + 'ss': {'text', 'scheme'}, + 'sty': {'text', 'tex'}, + 'styl': {'text', 'stylus'}, + 'sv': {'text', 'system-verilog'}, + 'svelte': {'text', 'svelte'}, + 'svg': {'text', 'image', 'svg', 'xml'}, + 'svh': {'text', 'system-verilog'}, + 'swf': {'binary', 'swf'}, + 'swift': {'text', 'swift'}, + 'swiftdeps': {'text', 'swiftdeps'}, + 'tac': {'text', 'twisted', 'python'}, + 'tar': {'binary', 'tar'}, + 'templ': {'text', 'templ'}, + 'tex': {'text', 'tex'}, + 'textproto': {'text', 'textproto'}, + 'tf': {'text', 'terraform'}, + 'tfvars': {'text', 'terraform'}, + 'tgz': {'binary', 'gzip'}, + 'thrift': {'text', 'thrift'}, + 'tiff': {'binary', 'image', 'tiff'}, + 'toml': {'text', 'toml'}, + 'ts': {'text', 'ts'}, + 'tsv': {'text', 'tsv'}, + 'tsx': {'text', 'tsx'}, + 'ttf': {'binary', 'ttf'}, + 'twig': {'text', 'twig'}, + 'txsprofile': {'text', 'ini', 'txsprofile'}, + 'txt': {'text', 'plain-text'}, + 'txtpb': {'text', 'textproto'}, + 'urdf': {'text', 'xml', 'urdf'}, + 'v': {'text', 'verilog'}, + 'vb': {'text', 'vb'}, + 'vbproj': {'text', 'xml', 'vbproj'}, + 'vcxproj': {'text', 'xml', 'vcxproj'}, + 'vdx': {'text', 'vdx'}, + 'vh': {'text', 'verilog'}, + 'vhd': {'text', 'vhdl'}, + 'vim': {'text', 'vim'}, + 'vtl': {'text', 'vtl'}, + 'vue': {'text', 'vue'}, + 'war': {'binary', 'zip', 'jar'}, + 'wav': {'binary', 'audio', 'wav'}, + 'webp': {'binary', 'image', 'webp'}, + 'whl': {'binary', 'wheel', 'zip'}, + 'wkt': {'text', 'wkt'}, + 'woff': {'binary', 'woff'}, + 'woff2': {'binary', 'woff2'}, + 'wsdl': {'text', 'xml', 'wsdl'}, + 'wsgi': {'text', 'wsgi', 'python'}, + 'xhtml': {'text', 'xml', 'html', 'xhtml'}, + 'xacro': {'text', 'xml', 'urdf', 'xacro'}, + 'xctestplan': {'text', 'json'}, + 'xml': {'text', 'xml'}, + 'xq': {'text', 'xquery'}, + 'xql': {'text', 'xquery'}, + 'xqm': {'text', 'xquery'}, + 'xqu': {'text', 'xquery'}, + 'xquery': {'text', 'xquery'}, + 'xqy': {'text', 'xquery'}, + 'xsd': {'text', 'xml', 'xsd'}, + 'xsl': {'text', 'xml', 'xsl'}, + 'yaml': {'text', 'yaml'}, + 'yamlld': {'text', 'yaml', 'yamlld'}, + 'yang': {'text', 'yang'}, + 'yin': {'text', 'xml', 'yin'}, + 'yml': {'text', 'yaml'}, + 'zcml': {'text', 'xml', 'zcml'}, + 'zig': {'text', 'zig'}, + 'zip': {'binary', 'zip'}, + 'zpt': {'text', 'zpt'}, + 'zsh': {'text', 'shell', 'zsh'}, +} +EXTENSIONS_NEED_BINARY_CHECK = { + 'plist': {'plist'}, + 'ppm': {'image', 'ppm'}, +} + +NAMES = { + '.ansible-lint': EXTENSIONS['yaml'], + '.babelrc': EXTENSIONS['json'] | {'babelrc'}, + '.bash_aliases': EXTENSIONS['bash'], + '.bash_profile': EXTENSIONS['bash'], + '.bashrc': EXTENSIONS['bash'], + '.bazelrc': {'text', 'bazelrc'}, + 
'.bowerrc': EXTENSIONS['json'] | {'bowerrc'}, + '.browserslistrc': {'text', 'browserslistrc'}, + '.clang-format': EXTENSIONS['yaml'], + '.clang-tidy': EXTENSIONS['yaml'], + '.codespellrc': EXTENSIONS['ini'] | {'codespellrc'}, + '.coveragerc': EXTENSIONS['ini'] | {'coveragerc'}, + '.cshrc': EXTENSIONS['csh'], + '.csslintrc': EXTENSIONS['json'] | {'csslintrc'}, + '.dockerignore': {'text', 'dockerignore'}, + '.editorconfig': {'text', 'editorconfig'}, + '.envrc': EXTENSIONS['bash'], + '.flake8': EXTENSIONS['ini'] | {'flake8'}, + '.gitattributes': {'text', 'gitattributes'}, + '.gitconfig': EXTENSIONS['ini'] | {'gitconfig'}, + '.gitignore': {'text', 'gitignore'}, + '.gitlint': EXTENSIONS['ini'] | {'gitlint'}, + '.gitmodules': {'text', 'gitmodules'}, + '.hgrc': EXTENSIONS['ini'] | {'hgrc'}, + '.isort.cfg': EXTENSIONS['ini'] | {'isort'}, + '.jshintrc': EXTENSIONS['json'] | {'jshintrc'}, + '.mailmap': {'text', 'mailmap'}, + '.mention-bot': EXTENSIONS['json'] | {'mention-bot'}, + '.npmignore': {'text', 'npmignore'}, + '.pdbrc': EXTENSIONS['py'] | {'pdbrc'}, + '.prettierignore': {'text', 'gitignore', 'prettierignore'}, + '.pypirc': EXTENSIONS['ini'] | {'pypirc'}, + '.rstcheck.cfg': EXTENSIONS['ini'], + '.salt-lint': EXTENSIONS['yaml'] | {'salt-lint'}, + '.yamllint': EXTENSIONS['yaml'] | {'yamllint'}, + '.zlogin': EXTENSIONS['zsh'], + '.zlogout': EXTENSIONS['zsh'], + '.zprofile': EXTENSIONS['zsh'], + '.zshrc': EXTENSIONS['zsh'], + '.zshenv': EXTENSIONS['zsh'], + 'AUTHORS': EXTENSIONS['txt'], + 'bblayers.conf': EXTENSIONS['bb'], + 'bitbake.conf': EXTENSIONS['bb'], + 'BUILD': EXTENSIONS['bzl'], + 'Cargo.toml': EXTENSIONS['toml'] | {'cargo'}, + 'Cargo.lock': EXTENSIONS['toml'] | {'cargo-lock'}, + 'CMakeLists.txt': EXTENSIONS['cmake'], + 'CHANGELOG': EXTENSIONS['txt'], + 'config.ru': EXTENSIONS['rb'], + 'Containerfile': {'text', 'dockerfile'}, + 'CONTRIBUTING': EXTENSIONS['txt'], + 'copy.bara.sky': EXTENSIONS['bzl'], + 'COPYING': EXTENSIONS['txt'], + 'Dockerfile': {'text', 'dockerfile'}, + 'direnvrc': EXTENSIONS['bash'], + 'Gemfile': EXTENSIONS['rb'], + 'Gemfile.lock': {'text'}, + 'GNUmakefile': EXTENSIONS['mk'], + 'go.mod': {'text', 'go-mod'}, + 'go.sum': {'text', 'go-sum'}, + 'Jenkinsfile': EXTENSIONS['jenkins'], + 'LICENSE': EXTENSIONS['txt'], + 'MAINTAINERS': EXTENSIONS['txt'], + 'Makefile': EXTENSIONS['mk'], + 'meson.build': EXTENSIONS['meson'], + 'meson_options.txt': EXTENSIONS['meson'], + 'makefile': EXTENSIONS['mk'], + 'NEWS': EXTENSIONS['txt'], + 'NOTICE': EXTENSIONS['txt'], + 'PATENTS': EXTENSIONS['txt'], + 'Pipfile': EXTENSIONS['toml'], + 'Pipfile.lock': EXTENSIONS['json'], + 'PKGBUILD': {'text', 'bash', 'pkgbuild', 'alpm'}, + 'poetry.lock': EXTENSIONS['toml'], + 'pom.xml': EXTENSIONS['pom'], + 'pylintrc': EXTENSIONS['ini'] | {'pylintrc'}, + 'README': EXTENSIONS['txt'], + 'Rakefile': EXTENSIONS['rb'], + 'rebar.config': EXTENSIONS['erl'], + 'setup.cfg': EXTENSIONS['ini'], + 'sys.config': EXTENSIONS['erl'], + 'sys.config.src': EXTENSIONS['erl'], + 'Tiltfile': {'text', 'tiltfile'}, + 'Vagrantfile': EXTENSIONS['rb'], + 'WORKSPACE': EXTENSIONS['bzl'], + 'wscript': EXTENSIONS['py'], +} diff --git a/.venv/Lib/site-packages/identify/identify.py b/.venv/Lib/site-packages/identify/identify.py new file mode 100644 index 00000000..0279ba8e --- /dev/null +++ b/.venv/Lib/site-packages/identify/identify.py @@ -0,0 +1,278 @@ +from __future__ import annotations + +import errno +import math +import os.path +import re +import shlex +import stat +import string +import sys +from typing import IO + +from identify 
import extensions +from identify import interpreters +from identify.vendor import licenses + + +printable = frozenset(string.printable) + +DIRECTORY = 'directory' +SYMLINK = 'symlink' +SOCKET = 'socket' +FILE = 'file' +EXECUTABLE = 'executable' +NON_EXECUTABLE = 'non-executable' +TEXT = 'text' +BINARY = 'binary' + +TYPE_TAGS = frozenset((DIRECTORY, FILE, SYMLINK, SOCKET)) +MODE_TAGS = frozenset((EXECUTABLE, NON_EXECUTABLE)) +ENCODING_TAGS = frozenset((BINARY, TEXT)) +_ALL_TAGS = {*TYPE_TAGS, *MODE_TAGS, *ENCODING_TAGS} +_ALL_TAGS.update(*extensions.EXTENSIONS.values()) +_ALL_TAGS.update(*extensions.EXTENSIONS_NEED_BINARY_CHECK.values()) +_ALL_TAGS.update(*extensions.NAMES.values()) +_ALL_TAGS.update(*interpreters.INTERPRETERS.values()) +ALL_TAGS = frozenset(_ALL_TAGS) + + +def tags_from_path(path: str) -> set[str]: + try: + sr = os.lstat(path) + except (OSError, ValueError): # same error-handling as `os.lexists()` + raise ValueError(f'{path} does not exist.') + + mode = sr.st_mode + if stat.S_ISDIR(mode): + return {DIRECTORY} + if stat.S_ISLNK(mode): + return {SYMLINK} + if stat.S_ISSOCK(mode): + return {SOCKET} + + tags = {FILE} + + executable = os.access(path, os.X_OK) + if executable: + tags.add(EXECUTABLE) + else: + tags.add(NON_EXECUTABLE) + + # As an optimization, if we're able to read tags from the filename, then we + # don't peek at the file contents. + t = tags_from_filename(os.path.basename(path)) + if len(t) > 0: + tags.update(t) + else: + if executable: + shebang = parse_shebang_from_file(path) + if len(shebang) > 0: + tags.update(tags_from_interpreter(shebang[0])) + + # some extensions can be both binary and text + # see EXTENSIONS_NEED_BINARY_CHECK + if not ENCODING_TAGS & tags: + if file_is_text(path): + tags.add(TEXT) + else: + tags.add(BINARY) + + assert ENCODING_TAGS & tags, tags + assert MODE_TAGS & tags, tags + return tags + + +def tags_from_filename(path: str) -> set[str]: + _, filename = os.path.split(path) + _, ext = os.path.splitext(filename) + + ret = set() + + # Allow e.g. "Dockerfile.xenial" to match "Dockerfile" + for part in [filename] + filename.split('.'): + if part in extensions.NAMES: + ret.update(extensions.NAMES[part]) + break + + if len(ext) > 0: + ext = ext[1:].lower() + if ext in extensions.EXTENSIONS: + ret.update(extensions.EXTENSIONS[ext]) + elif ext in extensions.EXTENSIONS_NEED_BINARY_CHECK: + ret.update(extensions.EXTENSIONS_NEED_BINARY_CHECK[ext]) + + return ret + + +def tags_from_interpreter(interpreter: str) -> set[str]: + _, _, interpreter = interpreter.rpartition('/') + + # Try "python3.5.2" => "python3.5" => "python3" until one matches. + while interpreter: + if interpreter in interpreters.INTERPRETERS: + return interpreters.INTERPRETERS[interpreter] + else: + interpreter, _, _ = interpreter.rpartition('.') + + return set() + + +def is_text(bytesio: IO[bytes]) -> bool: + """Return whether the first KB of contents seems to be binary. 
+ + This is roughly based on libmagic's binary/text detection: + https://github.com/file/file/blob/df74b09b9027676088c797528edcaae5a9ce9ad0/src/encoding.c#L203-L228 + """ + text_chars = ( + bytearray([7, 8, 9, 10, 11, 12, 13, 27]) + + bytearray(range(0x20, 0x7F)) + + bytearray(range(0x80, 0X100)) + ) + return not bool(bytesio.read(1024).translate(None, text_chars)) + + +def file_is_text(path: str) -> bool: + if not os.path.lexists(path): + raise ValueError(f'{path} does not exist.') + with open(path, 'rb') as f: + return is_text(f) + + +def _shebang_split(line: str) -> list[str]: + try: + # shebangs aren't supposed to be quoted, though some tools such as + # setuptools will write them with quotes so we'll best-guess parse + # with shlex first + return shlex.split(line) + except ValueError: + # failing that, we'll do a more "traditional" shebang parsing which + # just involves splitting by whitespace + return line.split() + + +def _parse_nix_shebang( + bytesio: IO[bytes], + cmd: tuple[str, ...], +) -> tuple[str, ...]: + while bytesio.read(2) == b'#!': + next_line_b = bytesio.readline() + try: + next_line = next_line_b.decode('UTF-8') + except UnicodeDecodeError: + return cmd + + for c in next_line: + if c not in printable: + return cmd + + line_tokens = tuple(_shebang_split(next_line.strip())) + for i, token in enumerate(line_tokens[:-1]): + if token != '-i': + continue + # the argument to -i flag + cmd = (line_tokens[i + 1],) + return cmd + + +def parse_shebang(bytesio: IO[bytes]) -> tuple[str, ...]: + """Parse the shebang from a file opened for reading binary.""" + if bytesio.read(2) != b'#!': + return () + first_line_b = bytesio.readline() + try: + first_line = first_line_b.decode('UTF-8') + except UnicodeDecodeError: + return () + + # Require only printable ascii + for c in first_line: + if c not in printable: + return () + + cmd = tuple(_shebang_split(first_line.strip())) + if cmd[:2] == ('/usr/bin/env', '-S'): + cmd = cmd[2:] + elif cmd[:1] == ('/usr/bin/env',): + cmd = cmd[1:] + + if cmd == ('nix-shell',): + return _parse_nix_shebang(bytesio, cmd) + + return cmd + + +def parse_shebang_from_file(path: str) -> tuple[str, ...]: + """Parse the shebang given a file path.""" + if not os.path.lexists(path): + raise ValueError(f'{path} does not exist.') + if not os.access(path, os.X_OK): + return () + + try: + with open(path, 'rb') as f: + return parse_shebang(f) + except OSError as e: + if e.errno == errno.EINVAL: + return () + else: + raise + + +COPYRIGHT_RE = re.compile(r'^\s*(Copyright|\(C\)) .*$', re.I | re.MULTILINE) +WS_RE = re.compile(r'\s+') + + +def _norm_license(s: str) -> str: + s = COPYRIGHT_RE.sub('', s) + s = WS_RE.sub(' ', s) + return s.strip() + + +def license_id(filename: str) -> str | None: + """Return the spdx id for the license contained in `filename`. If no + license is detected, returns `None`. + + spdx: https://spdx.org/licenses/ + licenses from choosealicense.com: https://github.com/choosealicense.com + + Approximate algorithm: + + 1. strip copyright line + 2. normalize whitespace (replace all whitespace with a single space) + 3. check exact text match with existing licenses + 4. 
failing that use edit distance + """ + import ukkonen # `pip install identify[license]` + + with open(filename, encoding='UTF-8') as f: + contents = f.read() + + norm = _norm_license(contents) + + min_edit_dist = sys.maxsize + min_edit_dist_spdx = '' + + cutoff = math.ceil(.05 * len(norm)) + + # try exact matches + for spdx, text in licenses.LICENSES: + norm_license = _norm_license(text) + if norm == norm_license: + return spdx + + # skip the slow calculation if the lengths are very different + if norm and abs(len(norm) - len(norm_license)) / len(norm) > .05: + continue + + edit_dist = ukkonen.distance(norm, norm_license, cutoff) + if edit_dist < cutoff and edit_dist < min_edit_dist: + min_edit_dist = edit_dist + min_edit_dist_spdx = spdx + + # if there's less than 5% edited from the license, we found our match + if norm and min_edit_dist < cutoff: + return min_edit_dist_spdx + else: + # no matches :'( + return None diff --git a/.venv/Lib/site-packages/identify/interpreters.py b/.venv/Lib/site-packages/identify/interpreters.py new file mode 100644 index 00000000..3022e009 --- /dev/null +++ b/.venv/Lib/site-packages/identify/interpreters.py @@ -0,0 +1,25 @@ +from __future__ import annotations +INTERPRETERS = { + 'ash': {'shell', 'ash'}, + 'awk': {'awk'}, + 'bash': {'shell', 'bash'}, + 'bats': {'shell', 'bash', 'bats'}, + 'cbsd': {'shell', 'cbsd'}, + 'csh': {'shell', 'csh'}, + 'dash': {'shell', 'dash'}, + 'expect': {'expect'}, + 'ksh': {'shell', 'ksh'}, + 'node': {'javascript'}, + 'nodejs': {'javascript'}, + 'perl': {'perl'}, + 'php': {'php'}, + 'php7': {'php', 'php7'}, + 'php8': {'php', 'php8'}, + 'python': {'python'}, + 'python2': {'python', 'python2'}, + 'python3': {'python', 'python3'}, + 'ruby': {'ruby'}, + 'sh': {'shell', 'sh'}, + 'tcsh': {'shell', 'tcsh'}, + 'zsh': {'shell', 'zsh'}, +} diff --git a/.venv/Lib/site-packages/identify/py.typed b/.venv/Lib/site-packages/identify/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/identify/vendor/__init__.py b/.venv/Lib/site-packages/identify/vendor/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/.venv/Lib/site-packages/identify/vendor/licenses.py b/.venv/Lib/site-packages/identify/vendor/licenses.py new file mode 100644 index 00000000..14ebfb3e --- /dev/null +++ b/.venv/Lib/site-packages/identify/vendor/licenses.py @@ -0,0 +1,6747 @@ +from __future__ import annotations +LICENSES = ( + ( + '0BSD', + '''\ +Copyright (c) [year] [fullname] + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +''', + ), + ( + 'AFL-3.0', + '''\ +Academic Free License (“AFL”) v. 3.0 + +This Academic Free License (the "License") applies to any original work of +authorship (the "Original Work") whose owner (the "Licensor") has placed the +following licensing notice adjacent to the copyright notice for the Original +Work: + + Licensed under the Academic Free License version 3.0 + +1) Grant of Copyright License. 
Licensor grants You a worldwide, royalty-free, +non-exclusive, sublicensable license, for the duration of the copyright, to do +the following: + + a) to reproduce the Original Work in copies, either alone or as part of a + collective work; + + b) to translate, adapt, alter, transform, modify, or arrange the Original + Work, thereby creating derivative works ("Derivative Works") based upon + the Original Work; + + c) to distribute or communicate copies of the Original Work and + Derivative Works to the public, under any license of your choice that + does not contradict the terms and conditions, including Licensor’s + reserved rights and remedies, in this Academic Free License; + d) to perform the Original Work publicly; and + e) to display the Original Work publicly. + +2) Grant of Patent License. Licensor grants You a worldwide, royalty-free, +non-exclusive, sublicensable license, under patent claims owned or controlled +by the Licensor that are embodied in the Original Work as furnished by the +Licensor, for the duration of the patents, to make, use, sell, offer for sale, +have made, and import the Original Work and Derivative Works. + +3) Grant of Source Code License. The term "Source Code" means the preferred +form of the Original Work for making modifications to it and all available +documentation describing how to modify the Original Work. Licensor agrees to +provide a machine-readable copy of the Source Code of the Original Work along +with each copy of the Original Work that Licensor distributes. Licensor +reserves the right to satisfy this obligation by placing a machine-readable +copy of the Source Code in an information repository reasonably calculated to +permit inexpensive and convenient access by You for as long as Licensor +continues to distribute the Original Work. + +4) Exclusions From License Grant. Neither the names of Licensor, nor the names +of any contributors to the Original Work, nor any of their trademarks or +service marks, may be used to endorse or promote products derived from this +Original Work without express prior permission of the Licensor. Except as +expressly stated herein, nothing in this License grants any license to +Licensor’s trademarks, copyrights, patents, trade secrets or any other +intellectual property. No patent license is granted to make, use, sell, offer +for sale, have made, or import embodiments of any patent claims other than the +licensed claims defined in Section 2. No license is granted to the trademarks +of Licensor even if such marks are included in the Original Work. Nothing in +this License shall be interpreted to prohibit Licensor from licensing under +terms different from this License any Original Work that Licensor otherwise +would have a right to license. + +5) External Deployment. The term "External Deployment" means the use, +distribution, or communication of the Original Work or Derivative Works in any +way such that the Original Work or Derivative Works may be used by anyone +other than You, whether those works are distributed or communicated to those +persons or made available as an application intended for use over a network. +As an express condition for the grants of license hereunder, You must treat +any External Deployment by You of the Original Work or a Derivative Work as a +distribution under section 1(c). + +6) Attribution Rights. 
You must retain, in the Source Code of any Derivative +Works that You create, all copyright, patent, or trademark notices from the +Source Code of the Original Work, as well as any notices of licensing and any +descriptive text identified therein as an "Attribution Notice." You must cause +the Source Code for any Derivative Works that You create to carry a prominent +Attribution Notice reasonably calculated to inform recipients that You have +modified the Original Work. + +7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that +the copyright in and to the Original Work and the patent rights granted herein +by Licensor are owned by the Licensor or are sublicensed to You under the +terms of this License with the permission of the contributor(s) of those +copyrights and patent rights. Except as expressly stated in the immediately +preceding sentence, the Original Work is provided under this License on an "AS +IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without +limitation, the warranties of non-infringement, merchantability or fitness for +a particular purpose. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK +IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this +License. No license to the Original Work is granted by this License except +under this disclaimer. + +8) Limitation of Liability. Under no circumstances and under no legal theory, +whether in tort (including negligence), contract, or otherwise, shall the +Licensor be liable to anyone for any indirect, special, incidental, or +consequential damages of any character arising as a result of this License or +the use of the Original Work including, without limitation, damages for loss +of goodwill, work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses. This limitation of liability shall not +apply to the extent applicable law prohibits such limitation. + +9) Acceptance and Termination. If, at any time, You expressly assented to this +License, that assent indicates your clear and irrevocable acceptance of this +License and all of its terms and conditions. If You distribute or communicate +copies of the Original Work or a Derivative Work, You must make a reasonable +effort under the circumstances to obtain the express assent of recipients to +the terms of this License. This License conditions your rights to undertake +the activities listed in Section 1, including your right to create Derivative +Works based upon the Original Work, and doing so without honoring these terms +and conditions is prohibited by copyright law and international treaty. +Nothing in this License is intended to affect copyright exceptions and +limitations (including “fair use” or “fair dealing”). This License shall +terminate immediately and You may no longer exercise any of the rights granted +to You by this License upon your failure to honor the conditions in Section +1(c). + +10) Termination for Patent Action. This License shall terminate automatically +and You may no longer exercise any of the rights granted to You by this +License as of the date You commence an action, including a cross-claim or +counterclaim, against Licensor or any licensee alleging that the Original Work +infringes a patent. This termination provision shall not apply for an action +alleging patent infringement by combinations of the Original Work with other +software or hardware. + +11) Jurisdiction, Venue and Governing Law. 
Any action or suit relating to this +License may be brought only in the courts of a jurisdiction wherein the +Licensor resides or in which Licensor conducts its primary business, and under +the laws of that jurisdiction excluding its conflict-of-law provisions. The +application of the United Nations Convention on Contracts for the +International Sale of Goods is expressly excluded. Any use of the Original +Work outside the scope of this License or after its termination shall be +subject to the requirements and penalties of copyright or patent law in the +appropriate jurisdiction. This section shall survive the termination of this +License. + +12) Attorneys’ Fees. In any action to enforce the terms of this License or +seeking damages relating thereto, the prevailing party shall be entitled to +recover its costs and expenses, including, without limitation, reasonable +attorneys' fees and costs incurred in connection with such action, including +any appeal of such action. This section shall survive the termination of this +License. + +13) Miscellaneous. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent necessary +to make it enforceable. + +14) Definition of "You" in This License. "You" throughout this License, +whether in upper or lower case, means an individual or a legal entity +exercising rights under, and complying with all of the terms of, this License. +For legal entities, "You" includes any entity that controls, is controlled by, +or is under common control with you. For purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the direction or +management of such entity, whether by contract or otherwise, or (ii) ownership +of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial +ownership of such entity. + +15) Right to Use. You may use the Original Work in all ways not otherwise +restricted or conditioned by this License or by law, and Licensor promises not +to interfere with or be responsible for such uses by You. + +16) Modification of This License. This License is Copyright © 2005 Lawrence +Rosen. Permission is granted to copy, distribute, or communicate this License +without modification. Nothing in this License permits You to modify this +License as applied to the Original Work or to Derivative Works. However, You +may modify the text of this License and copy, distribute or communicate your +modified version (the "Modified License") and apply it to other original works +of authorship subject to the following conditions: (i) You may not indicate in +any way that your Modified License is the "Academic Free License" or "AFL" and +you may not use those names in the name of your Modified License; (ii) You +must replace the notice specified in the first paragraph above with the notice +"Licensed under " or with a notice of your own +that is not confusingly similar to the notice in this License; and (iii) You +may not claim that your original works are open source software unless your +Modified License has been approved by Open Source Initiative (OSI) and You +comply with its license review and certification process. +''', + ), + ( + 'AGPL-3.0', + '''\ +GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
+ + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. +''', + ), + ( + 'Apache-2.0', + '''\ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+''', + ), + ( + 'Artistic-2.0', + '''\ +The Artistic License 2.0 + + Copyright (c) 2000-2006, The Perl Foundation. + + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +Preamble + +This license establishes the terms under which a given free software +Package may be copied, modified, distributed, and/or redistributed. +The intent is that the Copyright Holder maintains some artistic +control over the development of that Package while still keeping the +Package available as open source and free software. + +You are always permitted to make arrangements wholly outside of this +license directly with the Copyright Holder of a given Package. If the +terms of this license do not permit the full use that you propose to +make of the Package, you should contact the Copyright Holder and seek +a different licensing arrangement. + +Definitions + + "Copyright Holder" means the individual(s) or organization(s) + named in the copyright notice for the entire Package. + + "Contributor" means any party that has contributed code or other + material to the Package, in accordance with the Copyright Holder's + procedures. + + "You" and "your" means any person who would like to copy, + distribute, or modify the Package. + + "Package" means the collection of files distributed by the + Copyright Holder, and derivatives of that collection and/or of + those files. A given Package may consist of either the Standard + Version, or a Modified Version. + + "Distribute" means providing a copy of the Package or making it + accessible to anyone else, or in the case of a company or + organization, to others outside of your company or organization. + + "Distributor Fee" means any fee that you charge for Distributing + this Package or providing support for this Package to another + party. It does not mean licensing fees. + + "Standard Version" refers to the Package if it has not been + modified, or has been modified only in ways explicitly requested + by the Copyright Holder. + + "Modified Version" means the Package, if it has been changed, and + such changes were not explicitly requested by the Copyright + Holder. + + "Original License" means this Artistic License as Distributed with + the Standard Version of the Package, in its current version or as + it may be modified by The Perl Foundation in the future. + + "Source" form means the source code, documentation source, and + configuration files for the Package. + + "Compiled" form means the compiled bytecode, object code, binary, + or any other form resulting from mechanical transformation or + translation of the Source form. + + +Permission for Use and Modification Without Distribution + +(1) You are permitted to use the Standard Version and create and use +Modified Versions for any purpose without restriction, provided that +you do not Distribute the Modified Version. + + +Permissions for Redistribution of the Standard Version + +(2) You may Distribute verbatim copies of the Source form of the +Standard Version of this Package in any medium without restriction, +either gratis or for a Distributor Fee, provided that you duplicate +all of the original copyright notices and associated disclaimers. At +your discretion, such verbatim copies may or may not include a +Compiled form of the Package. + +(3) You may apply any bug fixes, portability changes, and other +modifications made available from the Copyright Holder. 
The resulting +Package will still be considered the Standard Version, and as such +will be subject to the Original License. + + +Distribution of Modified Versions of the Package as Source + +(4) You may Distribute your Modified Version as Source (either gratis +or for a Distributor Fee, and with or without a Compiled form of the +Modified Version) provided that you clearly document how it differs +from the Standard Version, including, but not limited to, documenting +any non-standard features, executables, or modules, and provided that +you do at least ONE of the following: + + (a) make the Modified Version available to the Copyright Holder + of the Standard Version, under the Original License, so that the + Copyright Holder may include your modifications in the Standard + Version. + + (b) ensure that installation of your Modified Version does not + prevent the user installing or running the Standard Version. In + addition, the Modified Version must bear a name that is different + from the name of the Standard Version. + + (c) allow anyone who receives a copy of the Modified Version to + make the Source form of the Modified Version available to others + under + + (i) the Original License or + + (ii) a license that permits the licensee to freely copy, + modify and redistribute the Modified Version using the same + licensing terms that apply to the copy that the licensee + received, and requires that the Source form of the Modified + Version, and of any works derived from it, be made freely + available in that license fees are prohibited but Distributor + Fees are allowed. + + +Distribution of Compiled Forms of the Standard Version +or Modified Versions without the Source + +(5) You may Distribute Compiled forms of the Standard Version without +the Source, provided that you include complete instructions on how to +get the Source of the Standard Version. Such instructions must be +valid at the time of your distribution. If these instructions, at any +time while you are carrying out such distribution, become invalid, you +must provide new instructions on demand or cease further distribution. +If you provide valid instructions or cease distribution within thirty +days after you become aware that the instructions are invalid, then +you do not forfeit any of your rights under this license. + +(6) You may Distribute a Modified Version in Compiled form without +the Source, provided that you comply with Section 4 with respect to +the Source of the Modified Version. + + +Aggregating or Linking the Package + +(7) You may aggregate the Package (either the Standard Version or +Modified Version) with other packages and Distribute the resulting +aggregation provided that you do not charge a licensing fee for the +Package. Distributor Fees are permitted, and licensing fees for other +components in the aggregation are permitted. The terms of this license +apply to the use and Distribution of the Standard or Modified Versions +as included in the aggregation. + +(8) You are permitted to link Modified and Standard Versions with +other works, to embed the Package in a larger work of your own, or to +build stand-alone binary or bytecode versions of applications that +include the Package, and Distribute the result without restriction, +provided the result does not expose a direct interface to the Package. 
+ + +Items That are Not Considered Part of a Modified Version + +(9) Works (including, but not limited to, modules and scripts) that +merely extend or make use of the Package, do not, by themselves, cause +the Package to be a Modified Version. In addition, such works are not +considered parts of the Package itself, and are not subject to the +terms of this license. + + +General Provisions + +(10) Any use, modification, and distribution of the Standard or +Modified Versions is governed by this Artistic License. By using, +modifying or distributing the Package, you accept this license. Do not +use, modify, or distribute the Package, if you do not accept this +license. + +(11) If your Modified Version has been derived from a Modified +Version made by someone other than you, you are nevertheless required +to ensure that your Modified Version complies with the requirements of +this license. + +(12) This license does not grant you the right to use any trademark, +service mark, tradename, or logo of the Copyright Holder. + +(13) This license includes the non-exclusive, worldwide, +free-of-charge patent license to make, have made, use, offer to sell, +sell, import and otherwise transfer the Package with respect to any +patent claims licensable by the Copyright Holder that are necessarily +infringed by the Package. If you institute patent litigation +(including a cross-claim or counterclaim) against any party alleging +that the Package constitutes direct or contributory patent +infringement, then this Artistic License to you shall terminate on the +date that such litigation is filed. + +(14) Disclaimer of Warranty: +THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS +IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR +NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL +LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +''', + ), + ( + 'BSD-2-Clause', + '''\ +BSD 2-Clause License + +Copyright (c) [year], [fullname] +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+''', + ), + ( + 'BSD-3-Clause', + '''\ +BSD 3-Clause License + +Copyright (c) [year], [fullname] +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +''', + ), + ( + 'BSD-3-Clause-Clear', + '''\ +The Clear BSD License + +Copyright (c) [year] [fullname] +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the disclaimer +below) provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY +THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+''', + ), + ( + 'BSL-1.0', + '''\ +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +''', + ), + ( + 'CC-BY-4.0', + '''\ +Attribution 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. 
If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More_considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution 4.0 International Public License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution 4.0 International Public License ("Public License"). To the +extent this Public License may be interpreted as a contract, You are +granted the Licensed Rights in consideration of Your acceptance of +these terms and conditions, and the Licensor grants You such rights in +consideration of benefits the Licensor receives from making the +Licensed Material available under these terms and conditions. + + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + + d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + e. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + f. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + g. 
Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + i. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + j. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + k. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part; and + + b. produce, reproduce, and Share Adapted Material. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. 
Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties. + + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. 
UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. 
No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. +''', + ), + ( + 'CC-BY-SA-4.0', + '''\ +Attribution-ShareAlike 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. 
More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More_considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution-ShareAlike 4.0 International Public +License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution-ShareAlike 4.0 International Public License ("Public +License"). To the extent this Public License may be interpreted as a +contract, You are granted the Licensed Rights in consideration of Your +acceptance of these terms and conditions, and the Licensor grants You +such rights in consideration of benefits the Licensor receives from +making the Licensed Material available under these terms and +conditions. + + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. BY-SA Compatible License means a license listed at + creativecommons.org/compatiblelicenses, approved by Creative + Commons as essentially the equivalent of this Public License. + + d. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + + e. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + f. 
Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + g. License Elements means the license attributes listed in the name + of a Creative Commons Public License. The License Elements of this + Public License are Attribution and ShareAlike. + + h. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + i. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + j. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + k. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + l. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + m. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part; and + + b. produce, reproduce, and Share Adapted Material. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. Additional offer from the Licensor -- Adapted Material. 
+ Every recipient of Adapted Material from You + automatically receives an offer from the Licensor to + exercise the Licensed Rights in the Adapted Material + under the conditions of the Adapter's License You apply. + + c. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties. + + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + b. ShareAlike. + + In addition to the conditions in Section 3(a), if You Share + Adapted Material You produce, the following conditions also apply. + + 1. The Adapter's License You apply must be a Creative Commons + license with the same License Elements, this version or + later, or a BY-SA Compatible License. 
+ + 2. You must include the text of, or the URI or hyperlink to, the + Adapter's License You apply. You may satisfy this condition + in any reasonable manner based on the medium, means, and + context in which You Share Adapted Material. + + 3. You may not offer or impose any additional or different terms + or conditions on, or apply any Effective Technological + Measures to, Adapted Material that restrict exercise of the + rights granted under the Adapter's License You apply. + + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material, + + including for purposes of Section 3(b); and + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. 
upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. +''', + ), + ( + 'CC0-1.0', + '''\ +CC0 1.0 Universal + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator and +subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). 
+ +Certain owners wish to permanently relinquish those rights to a Work for the +purpose of contributing to a commons of creative, cultural and scientific +works ("Commons") that the public can reliably and without fear of later +claims of infringement build upon, modify, incorporate in other works, reuse +and redistribute as freely as possible in any form whatsoever and for any +purposes, including without limitation commercial purposes. These owners may +contribute to the Commons to promote the ideal of a free culture and the +further production of creative, cultural and scientific works, or to gain +reputation or greater distribution for their Work in part through the use and +efforts of others. + +For these and/or other purposes and motivations, and without any expectation +of additional consideration or compensation, the person associating CC0 with a +Work (the "Affirmer"), to the extent that he or she is an owner of Copyright +and Related Rights in the Work, voluntarily elects to apply CC0 to the Work +and publicly distribute the Work under its terms, with knowledge of his or her +Copyright and Related Rights in the Work and the meaning and intended legal +effect of CC0 on those rights. + +1. Copyright and Related Rights. A Work made available under CC0 may be +protected by copyright and related or neighboring rights ("Copyright and +Related Rights"). Copyright and Related Rights include, but are not limited +to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, communicate, + and translate a Work; + + ii. moral rights retained by the original author(s) and/or performer(s); + + iii. publicity and privacy rights pertaining to a person's image or likeness + depicted in a Work; + + iv. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + + v. rights protecting the extraction, dissemination, use and reuse of data in + a Work; + + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation thereof, + including any amended or successor version of such directive); and + + vii. other similar, equivalent or corresponding rights throughout the world + based on applicable law or treaty, and any national implementations thereof. + +2. Waiver. To the greatest extent permitted by, but not in contravention of, +applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and +unconditionally waives, abandons, and surrenders all of Affirmer's Copyright +and Related Rights and associated claims and causes of action, whether now +known or unknown (including existing as well as future claims and causes of +action), in the Work (i) in all territories worldwide, (ii) for the maximum +duration provided by applicable law or treaty (including future time +extensions), (iii) in any current or future medium and for any number of +copies, and (iv) for any purpose whatsoever, including without limitation +commercial, advertising or promotional purposes (the "Waiver"). 
Affirmer makes +the Waiver for the benefit of each member of the public at large and to the +detriment of Affirmer's heirs and successors, fully intending that such Waiver +shall not be subject to revocation, rescission, cancellation, termination, or +any other legal or equitable action to disrupt the quiet enjoyment of the Work +by the public as contemplated by Affirmer's express Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason be +judged legally invalid or ineffective under applicable law, then the Waiver +shall be preserved to the maximum extent permitted taking into account +Affirmer's express Statement of Purpose. In addition, to the extent the Waiver +is so judged Affirmer hereby grants to each affected person a royalty-free, +non transferable, non sublicensable, non exclusive, irrevocable and +unconditional license to exercise Affirmer's Copyright and Related Rights in +the Work (i) in all territories worldwide, (ii) for the maximum duration +provided by applicable law or treaty (including future time extensions), (iii) +in any current or future medium and for any number of copies, and (iv) for any +purpose whatsoever, including without limitation commercial, advertising or +promotional purposes (the "License"). The License shall be deemed effective as +of the date CC0 was applied by Affirmer to the Work. Should any part of the +License for any reason be judged legally invalid or ineffective under +applicable law, such partial invalidity or ineffectiveness shall not +invalidate the remainder of the License, and in such case Affirmer hereby +affirms that he or she will not (i) exercise any of his or her remaining +Copyright and Related Rights in the Work or (ii) assert any associated claims +and causes of action with respect to the Work, in either case contrary to +Affirmer's express Statement of Purpose. + +4. Limitations and Disclaimers. + + a. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + + b. Affirmer offers the Work as-is and makes no representations or warranties + of any kind concerning the Work, express, implied, statutory or otherwise, + including without limitation warranties of title, merchantability, fitness + for a particular purpose, non infringement, or the absence of latent or + other defects, accuracy, or the present or absence of errors, whether or not + discoverable, all to the greatest extent permissible under applicable law. + + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without limitation + any person's Copyright and Related Rights in the Work. Further, Affirmer + disclaims responsibility for obtaining any necessary consents, permissions + or other rights required for any use of the Work. + + d. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to this + CC0 or use of the Work. + +For more information, please see + +''', + ), + ( + 'ECL-2.0', + '''\ +Educational Community License + +Version 2.0, April 2007 + +http://opensource.org/licenses/ECL-2.0 + +The Educational Community License version 2.0 ("ECL") consists of the Apache +2.0 license, modified to change the scope of the patent grant in section 3 to +be specific to the needs of the education communities using this license. 
The +original Apache 2.0 license can be found at: +http://www.apache.org/licenses/LICENSE-2.0 + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the +copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other +entities that control, are controlled by, or are under common control with +that entity. For the purposes of this definition, "control" means (i) the +power, direct or indirect, to cause the direction or management of such +entity, whether by contract or otherwise, or (ii) ownership of fifty percent +(50%) or more of the outstanding shares, or (iii) beneficial ownership of such +entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation source, and +configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object +code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, +made available under the License, as indicated by a copyright notice that is +included in or attached to the work (an example is provided in the Appendix +below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative +Works shall not include works that remain separable from, or merely link (or +bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original +version of the Work and any modifications or additions to that Work or +Derivative Works thereof, that is intentionally submitted to Licensor for +inclusion in the Work by the copyright owner or by an individual or Legal +Entity authorized to submit on behalf of the copyright owner. For the purposes +of this definition, "submitted" means any form of electronic, verbal, or +written communication sent to the Licensor or its representatives, including +but not limited to communication on electronic mailing lists, source code +control systems, and issue tracking systems that are managed by, or on behalf +of, the Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise designated +in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. 
+ +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and +such Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such +Contributor that are necessarily infringed by their Contribution(s) alone or +by combination of their Contribution(s) with the Work to which such +Contribution(s) was submitted. If You institute patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging that +the Work or a Contribution incorporated within the Work constitutes direct or +contributory patent infringement, then any patent licenses granted to You +under this License for that Work shall terminate as of the date such +litigation is filed. Any patent license granted hereby with respect to +contributions by an individual employed by an institution or organization is +limited to patent claims where the individual that is the author of the Work +is also the inventor of the patent claims licensed, and where the organization +or institution has the right to grant such license under applicable grant and +research funding agreements. No other express or implied licenses are granted. + +4. Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works +thereof in any medium, with or without modifications, and in Source or Object +form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and You must cause any modified files to carry prominent notices +stating that You changed the files; and You must retain, in the Source form of +any Derivative Works that You distribute, all copyright, patent, trademark, +and attribution notices from the Source form of the Work, excluding those +notices that do not pertain to any part of the Derivative Works; and If the +Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of +the following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. 
You may add Your own copyright statement to Your +modifications and may provide additional or different license terms and +conditions for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, reproduction, and +distribution of the Work otherwise complies with the conditions stated in this +License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally +submitted for inclusion in the Work by You to the Licensor shall be under the +terms and conditions of this License, without any additional terms or +conditions. Notwithstanding the above, nothing herein shall supersede or +modify the terms of any separate license agreement you may have executed with +Licensor regarding such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides +the Work (and each Contributor provides its Contributions) on an "AS IS" +BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You +are solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. + +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a result of +this License or out of the use or inability to use the Work (including but not +limited to damages for loss of goodwill, work stoppage, computer failure or +malfunction, or any and all other commercial damages or losses), even if such +Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. +However, in accepting such obligations, You may act only on Your own behalf +and on Your sole responsibility, not on behalf of any other Contributor, and +only if You agree to indemnify, defend, and hold each Contributor harmless for +any liability incurred by, or claims asserted against, such Contributor by +reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Educational Community License to your work + +To apply the Educational Community License to your work, attach the following +boilerplate notice, with the fields enclosed by brackets "[]" replaced with +your own identifying information. (Don't include the brackets!) The text +should be enclosed in the appropriate comment syntax for the file format. 
We +also recommend that a file or class name and description of purpose be +included on the same "printed page" as the copyright notice for easier +identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] Licensed under the Educational +Community License, Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain a copy of the License at + +http://opensource.org/licenses/ECL-2.0 + + Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations under +the License. +''', + ), + ( + 'EPL-1.0', + '''\ +Eclipse Public License - v 1.0 + +THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC +LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM +CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. DEFINITIONS + +"Contribution" means: + a) in the case of the initial Contributor, the initial code and + documentation distributed under this Agreement, and + b) in the case of each subsequent Contributor: + i) changes to the Program, and + ii) additions to the Program; + +where such changes and/or additions to the Program originate from and are +distributed by that particular Contributor. A Contribution 'originates' from a +Contributor if it was added to the Program by such Contributor itself or +anyone acting on such Contributor's behalf. Contributions do not include +additions to the Program which: (i) are separate modules of software +distributed in conjunction with the Program under their own license agreement, +and (ii) are not derivative works of the Program. +"Contributor" means any person or entity that distributes the Program. + +"Licensed Patents" mean patent claims licensable by a Contributor which are +necessarily infringed by the use or sale of its Contribution alone or when +combined with the Program. + +"Program" means the Contributions distributed in accordance with this +Agreement. + +"Recipient" means anyone who receives the Program under this Agreement, +including all Contributors. + +2. GRANT OF RIGHTS + + a) Subject to the terms of this Agreement, each Contributor hereby grants + Recipient a non-exclusive, worldwide, royalty-free copyright license to + reproduce, prepare derivative works of, publicly display, publicly + perform, distribute and sublicense the Contribution of such Contributor, + if any, and such derivative works, in source code and object code form. + + b) Subject to the terms of this Agreement, each Contributor hereby grants + Recipient a non-exclusive, worldwide, royalty-free patent license under + Licensed Patents to make, use, sell, offer to sell, import and otherwise + transfer the Contribution of such Contributor, if any, in source code and + object code form. This patent license shall apply to the combination of + the Contribution and the Program if, at the time the Contribution is + added by the Contributor, such addition of the Contribution causes such + combination to be covered by the Licensed Patents. The patent license + shall not apply to any other combinations which include the Contribution. + No hardware per se is licensed hereunder. 
+ + c) Recipient understands that although each Contributor grants the + licenses to its Contributions set forth herein, no assurances are + provided by any Contributor that the Program does not infringe the patent + or other intellectual property rights of any other entity. Each + Contributor disclaims any liability to Recipient for claims brought by + any other entity based on infringement of intellectual property rights or + otherwise. As a condition to exercising the rights and licenses granted + hereunder, each Recipient hereby assumes sole responsibility to secure + any other intellectual property rights needed, if any. For example, if a + third party patent license is required to allow Recipient to distribute + the Program, it is Recipient's responsibility to acquire that license + before distributing the Program. + + d) Each Contributor represents that to its knowledge it has sufficient + copyright rights in its Contribution, if any, to grant the copyright + license set forth in this Agreement. + +3. REQUIREMENTS +A Contributor may choose to distribute the Program in object code form under +its own license agreement, provided that: + + a) it complies with the terms and conditions of this Agreement; and + + b) its license agreement: + i) effectively disclaims on behalf of all Contributors all + warranties and conditions, express and implied, including warranties + or conditions of title and non-infringement, and implied warranties + or conditions of merchantability and fitness for a particular + purpose; + ii) effectively excludes on behalf of all Contributors all liability + for damages, including direct, indirect, special, incidental and + consequential damages, such as lost profits; + iii) states that any provisions which differ from this Agreement are + offered by that Contributor alone and not by any other party; and + iv) states that source code for the Program is available from such + Contributor, and informs licensees how to obtain it in a reasonable + manner on or through a medium customarily used for software + exchange. + +When the Program is made available in source code form: + + a) it must be made available under this Agreement; and + + b) a copy of this Agreement must be included with each copy of the + Program. +Contributors may not remove or alter any copyright notices contained within +the Program. + +Each Contributor must identify itself as the originator of its Contribution, +if any, in a manner that reasonably allows subsequent Recipients to identify +the originator of the Contribution. + +4. COMMERCIAL DISTRIBUTION +Commercial distributors of software may accept certain responsibilities with +respect to end users, business partners and the like. While this license is +intended to facilitate the commercial use of the Program, the Contributor who +includes the Program in a commercial product offering should do so in a manner +which does not create potential liability for other Contributors. Therefore, +if a Contributor includes the Program in a commercial product offering, such +Contributor ("Commercial Contributor") hereby agrees to defend and indemnify +every other Contributor ("Indemnified Contributor") against any losses, +damages and costs (collectively "Losses") arising from claims, lawsuits and +other legal actions brought by a third party against the Indemnified +Contributor to the extent caused by the acts or omissions of such Commercial +Contributor in connection with its distribution of the Program in a commercial +product offering. 
The obligations in this section do not apply to any claims +or Losses relating to any actual or alleged intellectual property +infringement. In order to qualify, an Indemnified Contributor must: a) +promptly notify the Commercial Contributor in writing of such claim, and b) +allow the Commercial Contributor to control, and cooperate with the Commercial +Contributor in, the defense and any related settlement negotiations. The +Indemnified Contributor may participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial product +offering, Product X. That Contributor is then a Commercial Contributor. If +that Commercial Contributor then makes performance claims, or offers +warranties related to Product X, those performance claims and warranties are +such Commercial Contributor's responsibility alone. Under this section, the +Commercial Contributor would have to defend claims against the other +Contributors related to those performance claims and warranties, and if a +court requires any other Contributor to pay any damages as a result, the +Commercial Contributor must pay those damages. + +5. NO WARRANTY +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR +IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, +NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each +Recipient is solely responsible for determining the appropriateness of using +and distributing the Program and assumes all risks associated with its +exercise of rights under this Agreement , including but not limited to the +risks and costs of program errors, compliance with applicable laws, damage to +or loss of data, programs or equipment, and unavailability or interruption of +operations. + +6. DISCLAIMER OF LIABILITY +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY +CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION +LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE +EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY +OF SUCH DAMAGES. + +7. GENERAL + +If any provision of this Agreement is invalid or unenforceable under +applicable law, it shall not affect the validity or enforceability of the +remainder of the terms of this Agreement, and without further action by the +parties hereto, such provision shall be reformed to the minimum extent +necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Program itself +(excluding combinations of the Program with other software or hardware) +infringes such Recipient's patent(s), then such Recipient's rights granted +under Section 2(b) shall terminate as of the date such litigation is filed. + +All Recipient's rights under this Agreement shall terminate if it fails to +comply with any of the material terms or conditions of this Agreement and does +not cure such failure in a reasonable period of time after becoming aware of +such noncompliance. 
If all Recipient's rights under this Agreement terminate, +Recipient agrees to cease use and distribution of the Program as soon as +reasonably practicable. However, Recipient's obligations under this Agreement +and any licenses granted by Recipient relating to the Program shall continue +and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, but in +order to avoid inconsistency the Agreement is copyrighted and may only be +modified in the following manner. The Agreement Steward reserves the right to +publish new versions (including revisions) of this Agreement from time to +time. No one other than the Agreement Steward has the right to modify this +Agreement. The Eclipse Foundation is the initial Agreement Steward. The +Eclipse Foundation may assign the responsibility to serve as the Agreement +Steward to a suitable separate entity. Each new version of the Agreement will +be given a distinguishing version number. The Program (including +Contributions) may always be distributed subject to the version of the +Agreement under which it was received. In addition, after a new version of the +Agreement is published, Contributor may elect to distribute the Program +(including its Contributions) under the new version. Except as expressly +stated in Sections 2(a) and 2(b) above, Recipient receives no rights or +licenses to the intellectual property of any Contributor under this Agreement, +whether expressly, by implication, estoppel or otherwise. All rights in the +Program not expressly granted under this Agreement are reserved. + +This Agreement is governed by the laws of the State of New York and the +intellectual property laws of the United States of America. No party to this +Agreement will bring a legal action under this Agreement more than one year +after the cause of action arose. Each party waives its rights to a jury trial +in any resulting litigation. +''', + ), + ( + 'EPL-2.0', + '''\ +Eclipse Public License - v 2.0 + + THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE + PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION + OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. DEFINITIONS + +"Contribution" means: + + a) in the case of the initial Contributor, the initial content + Distributed under this Agreement, and + + b) in the case of each subsequent Contributor: + i) changes to the Program, and + ii) additions to the Program; + where such changes and/or additions to the Program originate from + and are Distributed by that particular Contributor. A Contribution + "originates" from a Contributor if it was added to the Program by + such Contributor itself or anyone acting on such Contributor's behalf. + Contributions do not include changes or additions to the Program that + are not Modified Works. + +"Contributor" means any person or entity that Distributes the Program. + +"Licensed Patents" mean patent claims licensable by a Contributor which +are necessarily infringed by the use or sale of its Contribution alone +or when combined with the Program. + +"Program" means the Contributions Distributed in accordance with this +Agreement. + +"Recipient" means anyone who receives the Program under this Agreement +or any Secondary License (as applicable), including Contributors. 
+ +"Derivative Works" shall mean any work, whether in Source Code or other +form, that is based on (or derived from) the Program and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. + +"Modified Works" shall mean any work in Source Code or other form that +results from an addition to, deletion from, or modification of the +contents of the Program, including, for purposes of clarity any new file +in Source Code form that contains any contents of the Program. Modified +Works shall not include works that contain only declarations, +interfaces, types, classes, structures, or files of the Program solely +in each case in order to link to, bind by name, or subclass the Program +or Modified Works thereof. + +"Distribute" means the acts of a) distributing or b) making available +in any manner that enables the transfer of a copy. + +"Source Code" means the form of a Program preferred for making +modifications, including but not limited to software source code, +documentation source, and configuration files. + +"Secondary License" means either the GNU General Public License, +Version 2.0, or any later versions of that license, including any +exceptions or additional permissions as identified by the initial +Contributor. + +2. GRANT OF RIGHTS + + a) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free copyright + license to reproduce, prepare Derivative Works of, publicly display, + publicly perform, Distribute and sublicense the Contribution of such + Contributor, if any, and such Derivative Works. + + b) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free patent + license under Licensed Patents to make, use, sell, offer to sell, + import and otherwise transfer the Contribution of such Contributor, + if any, in Source Code or other form. This patent license shall + apply to the combination of the Contribution and the Program if, at + the time the Contribution is added by the Contributor, such addition + of the Contribution causes such combination to be covered by the + Licensed Patents. The patent license shall not apply to any other + combinations which include the Contribution. No hardware per se is + licensed hereunder. + + c) Recipient understands that although each Contributor grants the + licenses to its Contributions set forth herein, no assurances are + provided by any Contributor that the Program does not infringe the + patent or other intellectual property rights of any other entity. + Each Contributor disclaims any liability to Recipient for claims + brought by any other entity based on infringement of intellectual + property rights or otherwise. As a condition to exercising the + rights and licenses granted hereunder, each Recipient hereby + assumes sole responsibility to secure any other intellectual + property rights needed, if any. For example, if a third party + patent license is required to allow Recipient to Distribute the + Program, it is Recipient's responsibility to acquire that license + before distributing the Program. + + d) Each Contributor represents that to its knowledge it has + sufficient copyright rights in its Contribution, if any, to grant + the copyright license set forth in this Agreement. 
+ + e) Notwithstanding the terms of any Secondary License, no + Contributor makes additional grants to any Recipient (other than + those set forth in this Agreement) as a result of such Recipient's + receipt of the Program under the terms of a Secondary License + (if permitted under the terms of Section 3). + +3. REQUIREMENTS + +3.1 If a Contributor Distributes the Program in any form, then: + + a) the Program must also be made available as Source Code, in + accordance with section 3.2, and the Contributor must accompany + the Program with a statement that the Source Code for the Program + is available under this Agreement, and informs Recipients how to + obtain it in a reasonable manner on or through a medium customarily + used for software exchange; and + + b) the Contributor may Distribute the Program under a license + different than this Agreement, provided that such license: + i) effectively disclaims on behalf of all other Contributors all + warranties and conditions, express and implied, including + warranties or conditions of title and non-infringement, and + implied warranties or conditions of merchantability and fitness + for a particular purpose; + + ii) effectively excludes on behalf of all other Contributors all + liability for damages, including direct, indirect, special, + incidental and consequential damages, such as lost profits; + + iii) does not attempt to limit or alter the recipients' rights + in the Source Code under section 3.2; and + + iv) requires any subsequent distribution of the Program by any + party to be under a license that satisfies the requirements + of this section 3. + +3.2 When the Program is Distributed as Source Code: + + a) it must be made available under this Agreement, or if the + Program (i) is combined with other material in a separate file or + files made available under a Secondary License, and (ii) the initial + Contributor attached to the Source Code the notice described in + Exhibit A of this Agreement, then the Program may be made available + under the terms of such Secondary Licenses, and + + b) a copy of this Agreement must be included with each copy of + the Program. + +3.3 Contributors may not remove or alter any copyright, patent, +trademark, attribution notices, disclaimers of warranty, or limitations +of liability ("notices") contained within the Program from any copy of +the Program which they Distribute, provided that Contributors may add +their own appropriate notices. + +4. COMMERCIAL DISTRIBUTION + +Commercial distributors of software may accept certain responsibilities +with respect to end users, business partners and the like. While this +license is intended to facilitate the commercial use of the Program, +the Contributor who includes the Program in a commercial product +offering should do so in a manner which does not create potential +liability for other Contributors. Therefore, if a Contributor includes +the Program in a commercial product offering, such Contributor +("Commercial Contributor") hereby agrees to defend and indemnify every +other Contributor ("Indemnified Contributor") against any losses, +damages and costs (collectively "Losses") arising from claims, lawsuits +and other legal actions brought by a third party against the Indemnified +Contributor to the extent caused by the acts or omissions of such +Commercial Contributor in connection with its distribution of the Program +in a commercial product offering. 
The obligations in this section do not +apply to any claims or Losses relating to any actual or alleged +intellectual property infringement. In order to qualify, an Indemnified +Contributor must: a) promptly notify the Commercial Contributor in +writing of such claim, and b) allow the Commercial Contributor to control, +and cooperate with the Commercial Contributor in, the defense and any +related settlement negotiations. The Indemnified Contributor may +participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial +product offering, Product X. That Contributor is then a Commercial +Contributor. If that Commercial Contributor then makes performance +claims, or offers warranties related to Product X, those performance +claims and warranties are such Commercial Contributor's responsibility +alone. Under this section, the Commercial Contributor would have to +defend claims against the other Contributors related to those performance +claims and warranties, and if a court requires any other Contributor to +pay any damages as a result, the Commercial Contributor must pay +those damages. + +5. NO WARRANTY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS" +BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR +IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF +TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR +PURPOSE. Each Recipient is solely responsible for determining the +appropriateness of using and distributing the Program and assumes all +risks associated with its exercise of rights under this Agreement, +including but not limited to the risks and costs of program errors, +compliance with applicable laws, damage to or loss of data, programs +or equipment, and unavailability or interruption of operations. + +6. DISCLAIMER OF LIABILITY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS +SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST +PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE +EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + +7. GENERAL + +If any provision of this Agreement is invalid or unenforceable under +applicable law, it shall not affect the validity or enforceability of +the remainder of the terms of this Agreement, and without further +action by the parties hereto, such provision shall be reformed to the +minimum extent necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity +(including a cross-claim or counterclaim in a lawsuit) alleging that the +Program itself (excluding combinations of the Program with other software +or hardware) infringes such Recipient's patent(s), then such Recipient's +rights granted under Section 2(b) shall terminate as of the date such +litigation is filed. 
+
+All Recipient's rights under this Agreement shall terminate if it
+fails to comply with any of the material terms or conditions of this
+Agreement and does not cure such failure in a reasonable period of
+time after becoming aware of such noncompliance. If all Recipient's
+rights under this Agreement terminate, Recipient agrees to cease use
+and distribution of the Program as soon as reasonably practicable.
+However, Recipient's obligations under this Agreement and any licenses
+granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement,
+but in order to avoid inconsistency the Agreement is copyrighted and
+may only be modified in the following manner. The Agreement Steward
+reserves the right to publish new versions (including revisions) of
+this Agreement from time to time. No one other than the Agreement
+Steward has the right to modify this Agreement. The Eclipse Foundation
+is the initial Agreement Steward. The Eclipse Foundation may assign the
+responsibility to serve as the Agreement Steward to a suitable separate
+entity. Each new version of the Agreement will be given a distinguishing
+version number. The Program (including Contributions) may always be
+Distributed subject to the version of the Agreement under which it was
+received. In addition, after a new version of the Agreement is published,
+Contributor may elect to Distribute the Program (including its
+Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient
+receives no rights or licenses to the intellectual property of any
+Contributor under this Agreement, whether expressly, by implication,
+estoppel or otherwise. All rights in the Program not expressly granted
+under this Agreement are reserved. Nothing in this Agreement is intended
+to be enforceable by any entity that is not a Contributor or Recipient.
+No third-party beneficiary rights are created under this Agreement.
+
+Exhibit A - Form of Secondary Licenses Notice
+
+"This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s),
+version(s), and exceptions or additional permissions here}."
+
+  Simply including a copy of this Agreement, including this Exhibit A
+  is not sufficient to license the Source Code under Secondary Licenses.
+
+  If it is not possible or desirable to put the notice in a particular
+  file, then You may include the notice in a location (such as a LICENSE
+  file in a relevant directory) where a recipient would be likely to
+  look for such a notice.
+
+  You may add additional accurate notices of copyright ownership.
+''',
+    ),
+    (
+        'EUPL-1.1',
+        '''\
+European Union Public Licence
+V. 1.1
+
+
+EUPL © the European Community 2007
+
+
+This European Union Public Licence (the “EUPL”) applies to the
+Work or Software (as defined below) which is provided under the terms of this
+Licence. Any use of the Work, other than as authorised under this Licence is
+prohibited (to the extent such use is covered by a right of the copyright
+holder of the Work).
+
+The Original Work is provided under the terms of this
+Licence when the Licensor (as defined below) has placed the following notice
+immediately following the copyright notice for the Original Work:
+
+Licensed under the EUPL V.1.1
+
+or has expressed by any other mean his willingness to license under the EUPL.
+
+
+1. 
Definitions + +In this Licence, the +following terms have the following meaning: + +- The Licence: this Licence. + +- The Original Work or the Software: the software distributed +and/or communicated by the Licensor under this Licence, available as Source +Code and also as Executable Code as the case may be. + +- Derivative Works: +the works or software that could be created by the Licensee, based upon the +Original Work or modifications thereof. This Licence does not define the +extent of modification or dependence on the Original Work required in order to +classify a work as a Derivative Work; this extent is determined by copyright +law applicable in the country mentioned in Article 15. + +- The Work: the Original Work and/or its Derivative Works. + +- The Source Code: the human-readable form of the Work which is the most +convenient for people to study and modify. + +- The Executable Code: any code which has generally been compiled and which +is meant to be interpreted by a computer as a program. + +- The Licensor: the natural or legal person that distributes and/or +communicates the Work under the Licence. + +- Contributor(s): any natural or legal person who modifies the Work under the +Licence, or otherwise contributes to the creation of a Derivative Work. + +- The Licensee or “You”: any natural or legal person who makes any usage of +the Software under the terms of the Licence. + +- Distribution and/or Communication: any act of selling, giving, lending, +renting, distributing, communicating, transmitting, or otherwise +making available, on-line or off-line, copies of the Work or providing access +to its essential functionalities at the disposal of any other natural or legal +person. + + +2. Scope of the rights granted by the Licence + +The Licensor hereby grants You a world-wide, royalty-free, non-exclusive, +sub-licensable licence to do the following, for the duration of copyright +vested in the Original Work: + +- use the Work in any circumstance and for all usage, +- reproduce the Work, +- modify the Original Work, and make Derivative Works +based upon the Work, +- communicate to the public, including the right to make available or display +the Work or copies thereof to the public and perform publicly, as the case +may be, the Work, +- distribute the Work or copies thereof, +- lend and rent the Work or copies thereof, +- sub-license rights in the Work or copies thereof. + +Those rights can be exercised on any media, supports and formats, whether now +known or later invented, as far as the applicable law permits so. + +In the countries where moral rights apply, the Licensor waives his right to +exercise his moral right to the extent allowed by law in order to make +effective the licence of the economic rights here above listed. + +The Licensor grants to the Licensee royalty-free, non exclusive usage rights +to any patents held by the Licensor, to the extent necessary to make use of +the rights granted on the Work under this Licence. + + +3. Communication of the Source Code + +The Licensor may provide the Work either +in its Source Code form, or as Executable Code. 
If the Work is provided as +Executable Code, the Licensor provides in addition a machine-readable copy of +the Source Code of the Work along with each copy of the Work that the Licensor +distributes or indicates, in a notice following the copyright notice attached +to the Work, a repository where the Source Code is easily and freely +accessible for as long as the Licensor continues to distribute and/or +communicate the Work. + + +4. Limitations on copyright + +Nothing in this Licence is intended to deprive the Licensee of the benefits +from any exception or limitation to the exclusive rights of the rights owners +in the Original Work or Software, of the exhaustion of those rights or of +other applicable limitations thereto. + + +5. Obligations of the Licensee + +The grant of the rights mentioned above is subject to some restrictions and +obligations imposed on the Licensee. Those obligations are the following: + +Attribution right: +the Licensee shall keep intact all copyright, patent or trademarks notices and +all notices that refer to the Licence and to the disclaimer of warranties. The +Licensee must include a copy of such notices and a copy of the Licence with +every copy of the Work he/she distributes and/or communicates. The Licensee +must cause any Derivative Work to carry prominent notices stating that the +Work has been modified and the date of modification. + +Copyleft clause: +If the Licensee distributes and/or communicates copies of the Original Works +or Derivative Works based upon the Original Work, this Distribution and/or +Communication will be done under the terms of this Licence or of a later +version of this Licence unless the Original Work is expressly distributed only +under this version of the Licence. The Licensee (becoming Licensor) cannot +offer or impose any additional terms or conditions on the Work or Derivative +Work that alter or restrict the terms of the Licence. + +Compatibility clause: +If the Licensee Distributes and/or Communicates Derivative Works or copies +thereof based upon both the Original Work and another work licensed under a +Compatible Licence, this Distribution and/or Communication can be done under +the terms of this Compatible Licence. For the sake of this clause, +“Compatible Licence” refers to the licences listed in the appendix +attached to this Licence. Should the Licensee’s obligations under the +Compatible Licence conflict with his/her obligations under this Licence, the +obligations of the Compatible Licence shall prevail. + +Provision of Source Code: +When distributing and/or communicating copies of the Work, the Licensee +will provide a machine-readable copy of the Source Code or indicate a +repository where this Source will be easily and freely available for as long +as the Licensee continues to distribute and/or communicate the Work. + +Legal Protection: +This Licence does not grant permission to use the trade names, +trademarks, service marks, or names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the copyright notice. + + +6. Chain of Authorship + +The original Licensor warrants that the copyright in the Original Work +granted hereunder is owned by him/her or licensed to him/her and +that he/she has the power and authority to grant the Licence. 
+ +Each Contributor warrants that the copyright in the modifications he/she +brings to the Work are owned by him/her or licensed to him/her and that +he/she has the power and authority to grant the Licence. + +Each time You accept the Licence, the original Licensor and subsequent +Contributors grant You a licence to their contributions to the Work, under +the terms of this Licence. + + +7. Disclaimer of Warranty + +The Work is a work in progress, which is continuously improved by numerous +contributors. It is not a finished work and may therefore contain defects or +“bugs” inherent to this type of software development. + +For the above reason, the Work is provided under the Licence on an “as is” +basis and without warranties of any kind concerning the Work, including +without limitation merchantability, fitness for a particular purpose, absence +of defects or errors, accuracy, non-infringement of intellectual property +rights other than copyright as stated in Article 6 of this Licence. + +This disclaimer of warranty is an essential part of the Licence and a +condition for the grant of any rights to the Work. + + +8. Disclaimer of Liability + +Except in the cases of wilful misconduct or damages directly caused to +natural persons, the Licensor will in no event be liable for any direct or +indirect, material or moral, damages of any kind, arising out of the Licence +or of the use of the Work, including without limitation, +damages for loss of goodwill, work stoppage, computer failure or malfunction, +loss of data or any commercial damage, even if the Licensor has been advised +of the possibility of such damage. However, the Licensor will be liable under +statutory product liability laws as far such laws apply to the Work. + + +9. Additional agreements + +While distributing the Original Work or Derivative Works, You may choose +to conclude an additional agreement to offer, and charge a fee for, +acceptance of support, warranty, indemnity, or other liability +obligations and/or services consistent with this Licence. However, in +accepting such obligations, You may act only on your own behalf and on your +sole responsibility, not on behalf of the original Licensor or any other +Contributor, and only if You agree to indemnify, defend, and hold each +Contributor harmless for any liability incurred by, or claims asserted against +such Contributor by the fact You have accepted any such warranty or additional +liability. + + +10. Acceptance of the Licence + +The provisions of this Licence can be accepted by clicking on +an icon “I agree” placed under the bottom of a window displaying the text of +this Licence or by affirming consent in any other similar way, in accordance +with the rules of applicable law. Clicking on that icon indicates your clear +and irrevocable acceptance of this Licence and +all of its terms and conditions. + +Similarly, you irrevocably accept this Licence and +all of its terms and conditions by exercising any rights granted to You +by Article 2 of this Licence, such as the use of the Work, +the creation by You of a Derivative Work or the Distribution and/or +Communication by You of the Work or copies thereof. + + +11. 
Information to the public + +In case of any Distribution and/or Communication of the Work by means of +electronic communication by You (for example, by offering to download +the Work from a remote location) the distribution channel or media (for +example, a website) must at least provide to the public the information +requested by the applicable law regarding the Licensor, the Licence and the +way it may be accessible, concluded, stored and reproduced by the +Licensee. + + +12. Termination of the Licence + +The Licence and the rights granted hereunder will terminate automatically +upon any breach by the Licensee of the terms of the Licence. + +Such a termination will not terminate the licences of any person who has +received the Work from the Licensee under the Licence, provided such persons +remain in full compliance with the Licence. + + +13. Miscellaneous + +Without prejudice of Article 9 above, the Licence represents the complete +agreement between the Parties as to the Work licensed hereunder. + +If any provision of the Licence is invalid or unenforceable under applicable +law, this will not affect the validity or enforceability of the Licence as a +whole. Such provision will be construed and/or reformed so as necessary +to make it valid and enforceable. + +The European Commission may publish other linguistic versions and/or new +versions of this Licence, so far this is required and reasonable, without +reducing the scope of the rights granted by the Licence. +New versions of the Licence will be published with a unique version number. + +All linguistic versions of this Licence, approved by the European Commission, +have identical value. Parties can take advantage of the linguistic version +of their choice. + + +14. Jurisdiction + +Any litigation resulting from the interpretation of this License, arising +between the European Commission, as a Licensor, and any Licensee, +will be subject to the jurisdiction of the Court of Justice of the +European Communities, as laid down in article 238 of the Treaty establishing +the European Community. + +Any litigation arising between Parties, other than the European Commission, +and resulting from the interpretation of this License, will be subject to the +exclusive jurisdiction of the competent court where the Licensor resides or +conducts its primary business. + + +15. Applicable Law + +This Licence shall be governed by the law of the European Union country where +the Licensor resides or has his registered office. + +This licence shall be governed by the Belgian law if: + +- a litigation arises between the European Commission, as a Licensor, and any +Licensee; +- the Licensor, other than the European Commission, has no residence or +registered office inside a European Union country. + + +=== + + +Appendix + + +“Compatible Licences” according to article 5 EUPL are: +- GNU General Public License (GNU GPL) v. 2 +- Open Software License (OSL) v. 2.1, v. 3.0 +- Common Public License v. 1.0 +- Eclipse Public License v. 1.0 +- Cecill v. 2.0 +''', + ), + ( + 'EUPL-1.2', + '''\ +European Union Public Licence +V. 1.2 + +EUPL © the European Union 2007, 2016 + +This European Union Public Licence (the ‘EUPL’) applies to the Work (as +defined below) which is provided under the terms of this Licence. Any use of +the Work, other than as authorised under this Licence is prohibited (to the +extent such use is covered by a right of the copyright holder of the Work). 
+ +The Work is provided under the terms of this Licence when the Licensor (as +defined below) has placed the following notice immediately following the +copyright notice for the Work: “Licensed under the EUPL”, or has expressed by +any other means his willingness to license under the EUPL. + +1. Definitions + +In this Licence, the following terms have the following meaning: +— ‘The Licence’: this Licence. +— ‘The Original Work’: the work or software distributed or communicated by the + ‘Licensor under this Licence, available as Source Code and also as + ‘Executable Code as the case may be. +— ‘Derivative Works’: the works or software that could be created by the + ‘Licensee, based upon the Original Work or modifications thereof. This + ‘Licence does not define the extent of modification or dependence on the + ‘Original Work required in order to classify a work as a Derivative Work; + ‘this extent is determined by copyright law applicable in the country + ‘mentioned in Article 15. +— ‘The Work’: the Original Work or its Derivative Works. +— ‘The Source Code’: the human-readable form of the Work which is the most + convenient for people to study and modify. + +— ‘The Executable Code’: any code which has generally been compiled and which + is meant to be interpreted by a computer as a program. +— ‘The Licensor’: the natural or legal person that distributes or communicates + the Work under the Licence. +— ‘Contributor(s)’: any natural or legal person who modifies the Work under + the Licence, or otherwise contributes to the creation of a Derivative Work. +— ‘The Licensee’ or ‘You’: any natural or legal person who makes any usage of + the Work under the terms of the Licence. +— ‘Distribution’ or ‘Communication’: any act of selling, giving, lending, + renting, distributing, communicating, transmitting, or otherwise making + available, online or offline, copies of the Work or providing access to its + essential functionalities at the disposal of any other natural or legal + person. + +2. Scope of the rights granted by the Licence + +The Licensor hereby grants You a worldwide, royalty-free, non-exclusive, +sublicensable licence to do the following, for the duration of copyright +vested in the Original Work: + +— use the Work in any circumstance and for all usage, +— reproduce the Work, +— modify the Work, and make Derivative Works based upon the Work, +— communicate to the public, including the right to make available or display + the Work or copies thereof to the public and perform publicly, as the case + may be, the Work, +— distribute the Work or copies thereof, +— lend and rent the Work or copies thereof, +— sublicense rights in the Work or copies thereof. + +Those rights can be exercised on any media, supports and formats, whether now +known or later invented, as far as the applicable law permits so. + +In the countries where moral rights apply, the Licensor waives his right to +exercise his moral right to the extent allowed by law in order to make +effective the licence of the economic rights here above listed. + +The Licensor grants to the Licensee royalty-free, non-exclusive usage rights +to any patents held by the Licensor, to the extent necessary to make use of +the rights granted on the Work under this Licence. + +3. Communication of the Source Code + +The Licensor may provide the Work either in its Source Code form, or as +Executable Code. 
If the Work is provided as Executable Code, the Licensor +provides in addition a machine-readable copy of the Source Code of the Work +along with each copy of the Work that the Licensor distributes or indicates, +in a notice following the copyright notice attached to the Work, a repository +where the Source Code is easily and freely accessible for as long as the +Licensor continues to distribute or communicate the Work. + +4. Limitations on copyright + +Nothing in this Licence is intended to deprive the Licensee of the benefits +from any exception or limitation to the exclusive rights of the rights owners +in the Work, of the exhaustion of those rights or of other applicable +limitations thereto. + +5. Obligations of the Licensee + +The grant of the rights mentioned above is subject to some restrictions and +obligations imposed on the Licensee. Those obligations are the following: + +Attribution right: The Licensee shall keep intact all copyright, patent or +trademarks notices and all notices that refer to the Licence and to the +disclaimer of warranties. The Licensee must include a copy of such notices and +a copy of the Licence with every copy of the Work he/she distributes or +communicates. The Licensee must cause any Derivative Work to carry prominent +notices stating that the Work has been modified and the date of modification. + +Copyleft clause: If the Licensee distributes or communicates copies of the +Original Works or Derivative Works, this Distribution or Communication will be +done under the terms of this Licence or of a later version of this Licence +unless the Original Work is expressly distributed only under this version of +the Licence — for example by communicating ‘EUPL v. 1.2 only’. The Licensee +(becoming Licensor) cannot offer or impose any additional terms or conditions +on the Work or Derivative Work that alter or restrict the terms of the +Licence. + +Compatibility clause: If the Licensee Distributes or Communicates Derivative +Works or copies thereof based upon both the Work and another work licensed +under a Compatible Licence, this Distribution or Communication can be done +under the terms of this Compatible Licence. For the sake of this clause, +‘Compatible Licence’ refers to the licences listed in the appendix attached to +this Licence. Should the Licensee's obligations under the Compatible Licence +conflict with his/her obligations under this Licence, the obligations of the +Compatible Licence shall prevail. + +Provision of Source Code: When distributing or communicating copies of the +Work, the Licensee will provide a machine-readable copy of the Source Code or +indicate a repository where this Source will be easily and freely available +for as long as the Licensee continues to distribute or communicate the Work. + +Legal Protection: This Licence does not grant permission to use the trade +names, trademarks, service marks, or names of the Licensor, except as required +for reasonable and customary use in describing the origin of the Work and +reproducing the content of the copyright notice. + +6. Chain of Authorship + +The original Licensor warrants that the copyright in the Original Work granted +hereunder is owned by him/her or licensed to him/her and that he/she has the +power and authority to grant the Licence. + +Each Contributor warrants that the copyright in the modifications he/she +brings to the Work are owned by him/her or licensed to him/her and that he/she +has the power and authority to grant the Licence. 
+ +Each time You accept the Licence, the original Licensor and subsequent +Contributors grant You a licence to their contributions to the Work, under the +terms of this Licence. + +7. Disclaimer of Warranty + +The Work is a work in progress, which is continuously improved by numerous +Contributors. It is not a finished work and may therefore contain defects or +‘bugs’ inherent to this type of development. + +For the above reason, the Work is provided under the Licence on an ‘as is’ +basis and without warranties of any kind concerning the Work, including +without limitation merchantability, fitness for a particular purpose, absence +of defects or errors, accuracy, non-infringement of intellectual property +rights other than copyright as stated in Article 6 of this Licence. + +This disclaimer of warranty is an essential part of the Licence and a +condition for the grant of any rights to the Work. + +8. Disclaimer of Liability + +Except in the cases of wilful misconduct or damages directly caused to natural +persons, the Licensor will in no event be liable for any direct or indirect, +material or moral, damages of any kind, arising out of the Licence or of the +use of the Work, including without limitation, damages for loss of goodwill, +work stoppage, computer failure or malfunction, loss of data or any commercial +damage, even if the Licensor has been advised of the possibility of such +damage. However, the Licensor will be liable under statutory product liability +laws as far such laws apply to the Work. + +9. Additional agreements + +While distributing the Work, You may choose to conclude an additional +agreement, defining obligations or services consistent with this Licence. +However, if accepting obligations, You may act only on your own behalf and on +your sole responsibility, not on behalf of the original Licensor or any other +Contributor, and only if You agree to indemnify, defend, and hold each +Contributor harmless for any liability incurred by, or claims asserted against +such Contributor by the fact You have accepted any warranty or additional +liability. + +10. Acceptance of the Licence + +The provisions of this Licence can be accepted by clicking on an icon ‘I +agree’ placed under the bottom of a window displaying the text of this Licence +or by affirming consent in any other similar way, in accordance with the rules +of applicable law. Clicking on that icon indicates your clear and irrevocable +acceptance of this Licence and all of its terms and conditions. + +Similarly, you irrevocably accept this Licence and all of its terms and +conditions by exercising any rights granted to You by Article 2 of this +Licence, such as the use of the Work, the creation by You of a Derivative Work +or the Distribution or Communication by You of the Work or copies thereof. + +11. Information to the public + +In case of any Distribution or Communication of the Work by means of +electronic communication by You (for example, by offering to download the Work +from a remote location) the distribution channel or media (for example, a +website) must at least provide to the public the information requested by the +applicable law regarding the Licensor, the Licence and the way it may be +accessible, concluded, stored and reproduced by the Licensee. + +12. Termination of the Licence + +The Licence and the rights granted hereunder will terminate automatically upon +any breach by the Licensee of the terms of the Licence. 
Such a termination +will not terminate the licences of any person who has received the Work from +the Licensee under the Licence, provided such persons remain in full +compliance with the Licence. + +13. Miscellaneous + +Without prejudice of Article 9 above, the Licence represents the complete +agreement between the Parties as to the Work. + +If any provision of the Licence is invalid or unenforceable under applicable +law, this will not affect the validity or enforceability of the Licence as a +whole. Such provision will be construed or reformed so as necessary to make it +valid and enforceable. + +The European Commission may publish other linguistic versions or new versions +of this Licence or updated versions of the Appendix, so far this is required +and reasonable, without reducing the scope of the rights granted by the +Licence. New versions of the Licence will be published with a unique version +number. + +All linguistic versions of this Licence, approved by the European Commission, +have identical value. Parties can take advantage of the linguistic version of +their choice. + +14. Jurisdiction + +Without prejudice to specific agreement between parties, +— any litigation resulting from the interpretation of this License, arising + between the European Union institutions, bodies, offices or agencies, as a + Licensor, and any Licensee, will be subject to the jurisdiction of the Court + of Justice of the European Union, as laid down in article 272 of the Treaty + on the Functioning of the European Union, +— any litigation arising between other parties and resulting from the + interpretation of this License, will be subject to the exclusive + jurisdiction of the competent court where the Licensor resides or conducts + its primary business. + +15. Applicable Law + +Without prejudice to specific agreement between parties, +— this Licence shall be governed by the law of the European Union Member State + where the Licensor has his seat, resides or has his registered office, +— this licence shall be governed by Belgian law if the Licensor has no seat, + residence or registered office inside a European Union Member State. + +Appendix + +‘Compatible Licences’ according to Article 5 EUPL are: +— GNU General Public License (GPL) v. 2, v. 3 +— GNU Affero General Public License (AGPL) v. 3 +— Open Software License (OSL) v. 2.1, v. 3.0 +— Eclipse Public License (EPL) v. 1.0 +— CeCILL v. 2.0, v. 2.1 +— Mozilla Public Licence (MPL) v. 2 +— GNU Lesser General Public Licence (LGPL) v. 2.1, v. 3 +— Creative Commons Attribution-ShareAlike v. 3.0 Unported (CC BY-SA 3.0) for + works other than software +— European Union Public Licence (EUPL) v. 1.1, v. 1.2 +— Québec Free and Open-Source Licence — Reciprocity (LiLiQ-R) or + Strong Reciprocity (LiLiQ-R+) + +— The European Commission may update this Appendix to later versions of the + above licences without producing a new version of the EUPL, as long as they + provide the rights granted in Article 2 of this Licence and protect the + covered Source Code from exclusive appropriation. +— All other changes or additions to this Appendix require the production of a + new EUPL version. +''', + ), + ( + 'GPL-2.0', + '''\ +GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. 
The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. 
You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. 
If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. 
Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. +''', + ), + ( + 'GPL-3.0', + '''\ +GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. 
A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. +''', + ), + ( + 'ISC', + '''\ +ISC License + +Copyright (c) [year], [fullname] + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +''', + ), + ( + 'LGPL-2.1', + '''\ +GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. 
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. 
We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) 
+ + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. 
+ +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) 
+ + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. 
+ + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. 
+ +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 + USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random + Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! +''', + ), + ( + 'LGPL-3.0', + '''\ +GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. 
+Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. 
+ + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. 
+''', + ), + ( + 'LPPL-1.3c', + '''\ +The LaTeX Project Public License +=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + +LPPL Version 1.3c 2008-05-04 + +Copyright 1999 2002-2008 LaTeX3 Project + Everyone is allowed to distribute verbatim copies of this + license document, but modification of it is not allowed. + + +PREAMBLE +======== + +The LaTeX Project Public License (LPPL) is the primary license under +which the LaTeX kernel and the base LaTeX packages are distributed. + +You may use this license for any work of which you hold the copyright +and which you wish to distribute. This license may be particularly +suitable if your work is TeX-related (such as a LaTeX package), but +it is written in such a way that you can use it even if your work is +unrelated to TeX. + +The section `WHETHER AND HOW TO DISTRIBUTE WORKS UNDER THIS LICENSE', +below, gives instructions, examples, and recommendations for authors +who are considering distributing their works under this license. + +This license gives conditions under which a work may be distributed +and modified, as well as conditions under which modified versions of +that work may be distributed. + +We, the LaTeX3 Project, believe that the conditions below give you +the freedom to make and distribute modified versions of your work +that conform with whatever technical specifications you wish while +maintaining the availability, integrity, and reliability of +that work. If you do not see how to achieve your goal while +meeting these conditions, then read the document `cfgguide.tex' +and `modguide.tex' in the base LaTeX distribution for suggestions. + + +DEFINITIONS +=========== + +In this license document the following terms are used: + + `Work' + Any work being distributed under this License. + + `Derived Work' + Any work that under any applicable law is derived from the Work. + + `Modification' + Any procedure that produces a Derived Work under any applicable + law -- for example, the production of a file containing an + original file associated with the Work or a significant portion of + such a file, either verbatim or with modifications and/or + translated into another language. + + `Modify' + To apply any procedure that produces a Derived Work under any + applicable law. + + `Distribution' + Making copies of the Work available from one person to another, in + whole or in part. Distribution includes (but is not limited to) + making any electronic components of the Work accessible by + file transfer protocols such as FTP or HTTP or by shared file + systems such as Sun's Network File System (NFS). + + `Compiled Work' + A version of the Work that has been processed into a form where it + is directly usable on a computer system. This processing may + include using installation facilities provided by the Work, + transformations of the Work, copying of components of the Work, or + other activities. Note that modification of any installation + facilities provided by the Work constitutes modification of the Work. + + `Current Maintainer' + A person or persons nominated as such within the Work. If there is + no such explicit nomination then it is the `Copyright Holder' under + any applicable law. + + `Base Interpreter' + A program or process that is normally needed for running or + interpreting a part or the whole of the Work. + + A Base Interpreter may depend on external components but these + are not considered part of the Base Interpreter provided that each + external component clearly identifies itself whenever it is used + interactively. 
Unless explicitly specified when applying the + license to the Work, the only applicable Base Interpreter is a + `LaTeX-Format' or in the case of files belonging to the + `LaTeX-format' a program implementing the `TeX language'. + + + +CONDITIONS ON DISTRIBUTION AND MODIFICATION +=========================================== + +1. Activities other than distribution and/or modification of the Work +are not covered by this license; they are outside its scope. In +particular, the act of running the Work is not restricted and no +requirements are made concerning any offers of support for the Work. + +2. You may distribute a complete, unmodified copy of the Work as you +received it. Distribution of only part of the Work is considered +modification of the Work, and no right to distribute such a Derived +Work may be assumed under the terms of this clause. + +3. You may distribute a Compiled Work that has been generated from a +complete, unmodified copy of the Work as distributed under Clause 2 +above, as long as that Compiled Work is distributed in such a way that +the recipients may install the Compiled Work on their system exactly +as it would have been installed if they generated a Compiled Work +directly from the Work. + +4. If you are the Current Maintainer of the Work, you may, without +restriction, modify the Work, thus creating a Derived Work. You may +also distribute the Derived Work without restriction, including +Compiled Works generated from the Derived Work. Derived Works +distributed in this manner by the Current Maintainer are considered to +be updated versions of the Work. + +5. If you are not the Current Maintainer of the Work, you may modify +your copy of the Work, thus creating a Derived Work based on the Work, +and compile this Derived Work, thus creating a Compiled Work based on +the Derived Work. + +6. If you are not the Current Maintainer of the Work, you may +distribute a Derived Work provided the following conditions are met +for every component of the Work unless that component clearly states +in the copyright notice that it is exempt from that condition. Only +the Current Maintainer is allowed to add such statements of exemption +to a component of the Work. + + a. If a component of this Derived Work can be a direct replacement + for a component of the Work when that component is used with the + Base Interpreter, then, wherever this component of the Work + identifies itself to the user when used interactively with that + Base Interpreter, the replacement component of this Derived Work + clearly and unambiguously identifies itself as a modified version + of this component to the user when used interactively with that + Base Interpreter. + + b. Every component of the Derived Work contains prominent notices + detailing the nature of the changes to that component, or a + prominent reference to another file that is distributed as part + of the Derived Work and that contains a complete and accurate log + of the changes. + + c. No information in the Derived Work implies that any persons, + including (but not limited to) the authors of the original version + of the Work, provide any support, including (but not limited to) + the reporting and handling of errors, to recipients of the + Derived Work unless those persons have stated explicitly that + they do provide such support for the Derived Work. + + d. You distribute at least one of the following with the Derived Work: + + 1. 
A complete, unmodified copy of the Work; + if your distribution of a modified component is made by + offering access to copy the modified component from a + designated place, then offering equivalent access to copy + the Work from the same or some similar place meets this + condition, even though third parties are not compelled to + copy the Work along with the modified component; + + 2. Information that is sufficient to obtain a complete, + unmodified copy of the Work. + +7. If you are not the Current Maintainer of the Work, you may +distribute a Compiled Work generated from a Derived Work, as long as +the Derived Work is distributed to all recipients of the Compiled +Work, and as long as the conditions of Clause 6, above, are met with +regard to the Derived Work. + +8. The conditions above are not intended to prohibit, and hence do not +apply to, the modification, by any method, of any component so that it +becomes identical to an updated version of that component of the Work as +it is distributed by the Current Maintainer under Clause 4, above. + +9. Distribution of the Work or any Derived Work in an alternative +format, where the Work or that Derived Work (in whole or in part) is +then produced by applying some process to that format, does not relax or +nullify any sections of this license as they pertain to the results of +applying that process. + +10. a. A Derived Work may be distributed under a different license + provided that license itself honors the conditions listed in + Clause 6 above, in regard to the Work, though it does not have + to honor the rest of the conditions in this license. + + b. If a Derived Work is distributed under a different license, that + Derived Work must provide sufficient documentation as part of + itself to allow each recipient of that Derived Work to honor the + restrictions in Clause 6 above, concerning changes from the Work. + +11. This license places no restrictions on works that are unrelated to +the Work, nor does this license place any restrictions on aggregating +such works with the Work by any means. + +12. Nothing in this license is intended to, or may be used to, prevent +complete compliance by all parties with all applicable laws. + + +NO WARRANTY +=========== + +There is no warranty for the Work. Except when otherwise stated in +writing, the Copyright Holder provides the Work `as is', without +warranty of any kind, either expressed or implied, including, but not +limited to, the implied warranties of merchantability and fitness for a +particular purpose. The entire risk as to the quality and performance +of the Work is with you. Should the Work prove defective, you assume +the cost of all necessary servicing, repair, or correction. + +In no event unless required by applicable law or agreed to in writing +will The Copyright Holder, or any author named in the components of the +Work, or any other party who may distribute and/or modify the Work as +permitted above, be liable to you for damages, including any general, +special, incidental or consequential damages arising out of any use of +the Work or out of inability to use the Work (including, but not limited +to, loss of data, data being rendered inaccurate, or losses sustained by +anyone as a result of any failure of the Work to operate with any other +programs), even if the Copyright Holder or said author or said other +party has been advised of the possibility of such damages. 
+ + +MAINTENANCE OF THE WORK +======================= + +The Work has the status `author-maintained' if the Copyright Holder +explicitly and prominently states near the primary copyright notice in +the Work that the Work can only be maintained by the Copyright Holder +or simply that it is `author-maintained'. + +The Work has the status `maintained' if there is a Current Maintainer +who has indicated in the Work that they are willing to receive error +reports for the Work (for example, by supplying a valid e-mail +address). It is not required for the Current Maintainer to acknowledge +or act upon these error reports. + +The Work changes from status `maintained' to `unmaintained' if there +is no Current Maintainer, or the person stated to be Current +Maintainer of the work cannot be reached through the indicated means +of communication for a period of six months, and there are no other +significant signs of active maintenance. + +You can become the Current Maintainer of the Work by agreement with +any existing Current Maintainer to take over this role. + +If the Work is unmaintained, you can become the Current Maintainer of +the Work through the following steps: + + 1. Make a reasonable attempt to trace the Current Maintainer (and + the Copyright Holder, if the two differ) through the means of + an Internet or similar search. + + 2. If this search is successful, then enquire whether the Work + is still maintained. + + a. If it is being maintained, then ask the Current Maintainer + to update their communication data within one month. + + b. If the search is unsuccessful or no action to resume active + maintenance is taken by the Current Maintainer, then announce + within the pertinent community your intention to take over + maintenance. (If the Work is a LaTeX work, this could be + done, for example, by posting to comp.text.tex.) + + 3a. If the Current Maintainer is reachable and agrees to pass + maintenance of the Work to you, then this takes effect + immediately upon announcement. + + b. If the Current Maintainer is not reachable and the Copyright + Holder agrees that maintenance of the Work be passed to you, + then this takes effect immediately upon announcement. + + 4. If you make an `intention announcement' as described in 2b. above + and after three months your intention is challenged neither by + the Current Maintainer nor by the Copyright Holder nor by other + people, then you may arrange for the Work to be changed so as + to name you as the (new) Current Maintainer. + + 5. If the previously unreachable Current Maintainer becomes + reachable once more within three months of a change completed + under the terms of 3b) or 4), then that Current Maintainer must + become or remain the Current Maintainer upon request provided + they then update their communication data within one month. + +A change in the Current Maintainer does not, of itself, alter the fact +that the Work is distributed under the LPPL license. + +If you become the Current Maintainer of the Work, you should +immediately provide, within the Work, a prominent and unambiguous +statement of your status as Current Maintainer. You should also +announce your new status to the same pertinent community as +in 2b) above. + + +WHETHER AND HOW TO DISTRIBUTE WORKS UNDER THIS LICENSE +====================================================== + +This section contains important instructions, examples, and +recommendations for authors who are considering distributing their +works under this license. 
These authors are addressed as `you' in +this section. + +Choosing This License or Another License +---------------------------------------- + +If for any part of your work you want or need to use *distribution* +conditions that differ significantly from those in this license, then +do not refer to this license anywhere in your work but, instead, +distribute your work under a different license. You may use the text +of this license as a model for your own license, but your license +should not refer to the LPPL or otherwise give the impression that +your work is distributed under the LPPL. + +The document `modguide.tex' in the base LaTeX distribution explains +the motivation behind the conditions of this license. It explains, +for example, why distributing LaTeX under the GNU General Public +License (GPL) was considered inappropriate. Even if your work is +unrelated to LaTeX, the discussion in `modguide.tex' may still be +relevant, and authors intending to distribute their works under any +license are encouraged to read it. + +A Recommendation on Modification Without Distribution +----------------------------------------------------- + +It is wise never to modify a component of the Work, even for your own +personal use, without also meeting the above conditions for +distributing the modified component. While you might intend that such +modifications will never be distributed, often this will happen by +accident -- you may forget that you have modified that component; or +it may not occur to you when allowing others to access the modified +version that you are thus distributing it and violating the conditions +of this license in ways that could have legal implications and, worse, +cause problems for the community. It is therefore usually in your +best interest to keep your copy of the Work identical with the public +one. Many works provide ways to control the behavior of that work +without altering any of its licensed components. + +How to Use This License +----------------------- + +To use this license, place in each of the components of your work both +an explicit copyright notice including your name and the year the work +was authored and/or last substantially modified. Include also a +statement that the distribution and/or modification of that +component is constrained by the conditions in this license. + +Here is an example of such a notice and statement: + + %% pig.dtx + %% Copyright 2005 M. Y. Name + % + % This work may be distributed and/or modified under the + % conditions of the LaTeX Project Public License, either version 1.3 + % of this license or (at your option) any later version. + % The latest version of this license is in + % http://www.latex-project.org/lppl.txt + % and version 1.3 or later is part of all distributions of LaTeX + % version 2005/12/01 or later. + % + % This work has the LPPL maintenance status `maintained'. + % + % The Current Maintainer of this work is M. Y. Name. + % + % This work consists of the files pig.dtx and pig.ins + % and the derived file pig.sty. + +Given such a notice and statement in a file, the conditions +given in this license document would apply, with the `Work' referring +to the three files `pig.dtx', `pig.ins', and `pig.sty' (the last being +generated from `pig.dtx' using `pig.ins'), the `Base Interpreter' +referring to any `LaTeX-Format', and both `Copyright Holder' and +`Current Maintainer' referring to the person `M. Y. Name'. 
+ +If you do not want the Maintenance section of LPPL to apply to your +Work, change `maintained' above into `author-maintained'. +However, we recommend that you use `maintained', as the Maintenance +section was added in order to ensure that your Work remains useful to +the community even when you can no longer maintain and support it +yourself. + +Derived Works That Are Not Replacements +--------------------------------------- + +Several clauses of the LPPL specify means to provide reliability and +stability for the user community. They therefore concern themselves +with the case that a Derived Work is intended to be used as a +(compatible or incompatible) replacement of the original Work. If +this is not the case (e.g., if a few lines of code are reused for a +completely different task), then clauses 6b and 6d shall not apply. + + +Important Recommendations +------------------------- + + Defining What Constitutes the Work + + The LPPL requires that distributions of the Work contain all the + files of the Work. It is therefore important that you provide a + way for the licensee to determine which files constitute the Work. + This could, for example, be achieved by explicitly listing all the + files of the Work near the copyright notice of each file or by + using a line such as: + + % This work consists of all files listed in manifest.txt. + + in that place. In the absence of an unequivocal list it might be + impossible for the licensee to determine what is considered by you + to comprise the Work and, in such a case, the licensee would be + entitled to make reasonable conjectures as to which files comprise + the Work. +''', + ), + ( + 'MIT', + '''\ +MIT License + +Copyright (c) [year] [fullname] + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +''', + ), + ( + 'MPL-2.0', + '''\ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. 
+ +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. 
+Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. 
Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. 
Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. 
+ +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. +''', + ), + ( + 'MS-PL', + '''\ +Microsoft Public License (Ms-PL) + +This license governs use of the accompanying software. If you use the +software, you accept this license. If you do not accept the license, do not +use the software. + +1. Definitions +The terms "reproduce," "reproduction," "derivative works," and "distribution" +have the same meaning here as under U.S. copyright law. A "contribution" is +the original software, or any additions or changes to the software. A +"contributor" is any person that distributes its contribution under this +license. "Licensed patents" are a contributor's patent claims that read +directly on its contribution. + +2. Grant of Rights + (A) Copyright Grant- Subject to the terms of this license, including the + license conditions and limitations in section 3, each contributor grants + you a non-exclusive, worldwide, royalty-free copyright license to + reproduce its contribution, prepare derivative works of its contribution, + and distribute its contribution or any derivative works that you create. + + (B) Patent Grant- Subject to the terms of this license, including the + license conditions and limitations in section 3, each contributor grants + you a non-exclusive, worldwide, royalty-free license under its licensed + patents to make, have made, use, sell, offer for sale, import, and/or + otherwise dispose of its contribution in the software or derivative works + of the contribution in the software. + +3. Conditions and Limitations + (A) No Trademark License- This license does not grant you rights to use + any contributors' name, logo, or trademarks. + + (B) If you bring a patent claim against any contributor over patents that + you claim are infringed by the software, your patent license from such + contributor to the software ends automatically. + + (C) If you distribute any portion of the software, you must retain all + copyright, patent, trademark, and attribution notices that are present in + the software. 
+ + (D) If you distribute any portion of the software in source code form, + you may do so only under this license by including a complete copy of + this license with your distribution. If you distribute any portion of the + software in compiled or object code form, you may only do so under a + license that complies with this license. + + (E) The software is licensed "as-is." You bear the risk of using it. The + contributors give no express warranties, guarantees, or conditions. You + may have additional consumer rights under your local laws which this + license cannot change. To the extent permitted under your local laws, the + contributors exclude the implied warranties of merchantability, fitness + for a particular purpose and non-infringement. +''', + ), + ( + 'MS-RL', + '''\ +Microsoft Reciprocal License (Ms-RL) + +This license governs use of the accompanying software. If you use the +software, you accept this license. If you do not accept the license, do not +use the software. + +1. Definitions +The terms "reproduce," "reproduction," "derivative works," and "distribution" +have the same meaning here as under U.S. copyright law. + +A "contribution" is the original software, or any additions or changes to the +software. + +A "contributor" is any person that distributes its contribution under this +license. + +"Licensed patents" are a contributor's patent claims that read directly on its +contribution. + +2. Grant of Rights + (A) Copyright Grant- Subject to the terms of this license, including the + license conditions and limitations in section 3, each contributor grants + you a non-exclusive, worldwide, royalty-free copyright license to + reproduce its contribution, prepare derivative works of its contribution, + and distribute its contribution or any derivative works that you create. + + (B) Patent Grant- Subject to the terms of this license, including the + license conditions and limitations in section 3, each contributor grants + you a non-exclusive, worldwide, royalty-free license under its licensed + patents to make, have made, use, sell, offer for sale, import, and/or + otherwise dispose of its contribution in the software or derivative works + of the contribution in the software. + +3. Conditions and Limitations + (A) Reciprocal Grants- For any file you distribute that contains code + from the software (in source code or binary format), you must provide + recipients the source code to that file along with a copy of this + license, which license will govern that file. You may license other files + that are entirely your own work and do not contain code from the software + under any terms you choose. + + (B) No Trademark License- This license does not grant you rights to use + any contributors' name, logo, or trademarks. + + (C) If you bring a patent claim against any contributor over patents that + you claim are infringed by the software, your patent license from such + contributor to the software ends automatically. + + (D) If you distribute any portion of the software, you must retain all + copyright, patent, trademark, and attribution notices that are present in + the software. + + (E) If you distribute any portion of the software in source code form, + you may do so only under this license by including a complete copy of + this license with your distribution. If you distribute any portion of the + software in compiled or object code form, you may only do so under a + license that complies with this license. + + (F) The software is licensed "as-is." 
You bear the risk of using it. The
+    contributors give no express warranties, guarantees, or conditions. You
+    may have additional consumer rights under your local laws which this
+    license cannot change. To the extent permitted under your local laws, the
+    contributors exclude the implied warranties of merchantability, fitness
+    for a particular purpose and non-infringement.
+''',
+    ),
+    (
+        'NCSA',
+        '''\
+University of Illinois/NCSA Open Source License
+
+Copyright (c) [year] [fullname]. All rights reserved.
+
+Developed by: [project] [fullname] [projecturl]
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+with the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimers.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimers in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the names of [fullname], [project] nor the names of its contributors
+  may be used to endorse or promote products derived from this Software
+  without specific prior written permission.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH
+THE SOFTWARE.
+''',
+    ),
+    (
+        'OFL-1.1',
+        '''\
+Copyright (c) [year] [fullname] ([email])
+
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+This license is copied below, and is also available with a FAQ at:
+http://scripts.sil.org/OFL
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation. 
+ +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION AND CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. +''', + ), + ( + 'OSL-3.0', + '''\ +Open Software License ("OSL") v 3.0 + +This Open Software License (the "License") applies to any original work of +authorship (the "Original Work") whose owner (the "Licensor") has placed the +following licensing notice adjacent to the copyright notice for the Original +Work: + +Licensed under the Open Software License version 3.0 + +1) Grant of Copyright License. 
Licensor grants You a worldwide, royalty-free, +non-exclusive, sublicensable license, for the duration of the copyright, to do +the following: + + a) to reproduce the Original Work in copies, either alone or as part of a + collective work; + + b) to translate, adapt, alter, transform, modify, or arrange the Original + Work, thereby creating derivative works ("Derivative Works") based upon the + Original Work; + + c) to distribute or communicate copies of the Original Work and Derivative + Works to the public, with the proviso that copies of Original Work or + Derivative Works that You distribute or communicate shall be licensed under + this Open Software License; + + d) to perform the Original Work publicly; and + + e) to display the Original Work publicly. + +2) Grant of Patent License. Licensor grants You a worldwide, royalty-free, +non-exclusive, sublicensable license, under patent claims owned or controlled +by the Licensor that are embodied in the Original Work as furnished by the +Licensor, for the duration of the patents, to make, use, sell, offer for sale, +have made, and import the Original Work and Derivative Works. + +3) Grant of Source Code License. The term "Source Code" means the preferred +form of the Original Work for making modifications to it and all available +documentation describing how to modify the Original Work. Licensor agrees to +provide a machine-readable copy of the Source Code of the Original Work along +with each copy of the Original Work that Licensor distributes. Licensor +reserves the right to satisfy this obligation by placing a machine-readable +copy of the Source Code in an information repository reasonably calculated to +permit inexpensive and convenient access by You for as long as Licensor +continues to distribute the Original Work. + +4) Exclusions From License Grant. Neither the names of Licensor, nor the names +of any contributors to the Original Work, nor any of their trademarks or +service marks, may be used to endorse or promote products derived from this +Original Work without express prior permission of the Licensor. Except as +expressly stated herein, nothing in this License grants any license to +Licensor's trademarks, copyrights, patents, trade secrets or any other +intellectual property. No patent license is granted to make, use, sell, offer +for sale, have made, or import embodiments of any patent claims other than the +licensed claims defined in Section 2. No license is granted to the trademarks +of Licensor even if such marks are included in the Original Work. Nothing in +this License shall be interpreted to prohibit Licensor from licensing under +terms different from this License any Original Work that Licensor otherwise +would have a right to license. + +5) External Deployment. The term "External Deployment" means the use, +distribution, or communication of the Original Work or Derivative Works in any +way such that the Original Work or Derivative Works may be used by anyone +other than You, whether those works are distributed or communicated to those +persons or made available as an application intended for use over a network. +As an express condition for the grants of license hereunder, You must treat +any External Deployment by You of the Original Work or a Derivative Work as a +distribution under section 1(c). + +6) Attribution Rights. 
You must retain, in the Source Code of any Derivative +Works that You create, all copyright, patent, or trademark notices from the +Source Code of the Original Work, as well as any notices of licensing and any +descriptive text identified therein as an "Attribution Notice." You must cause +the Source Code for any Derivative Works that You create to carry a prominent +Attribution Notice reasonably calculated to inform recipients that You have +modified the Original Work. + +7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that +the copyright in and to the Original Work and the patent rights granted herein +by Licensor are owned by the Licensor or are sublicensed to You under the +terms of this License with the permission of the contributor(s) of those +copyrights and patent rights. Except as expressly stated in the immediately +preceding sentence, the Original Work is provided under this License on an "AS +IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without +limitation, the warranties of non-infringement, merchantability or fitness for +a particular purpose. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK +IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this +License. No license to the Original Work is granted by this License except +under this disclaimer. + +8) Limitation of Liability. Under no circumstances and under no legal theory, +whether in tort (including negligence), contract, or otherwise, shall the +Licensor be liable to anyone for any indirect, special, incidental, or +consequential damages of any character arising as a result of this License or +the use of the Original Work including, without limitation, damages for loss +of goodwill, work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses. This limitation of liability shall not +apply to the extent applicable law prohibits such limitation. + +9) Acceptance and Termination. If, at any time, You expressly assented to this +License, that assent indicates your clear and irrevocable acceptance of this +License and all of its terms and conditions. If You distribute or communicate +copies of the Original Work or a Derivative Work, You must make a reasonable +effort under the circumstances to obtain the express assent of recipients to +the terms of this License. This License conditions your rights to undertake +the activities listed in Section 1, including your right to create Derivative +Works based upon the Original Work, and doing so without honoring these terms +and conditions is prohibited by copyright law and international treaty. +Nothing in this License is intended to affect copyright exceptions and +limitations (including "fair use" or "fair dealing"). This License shall +terminate immediately and You may no longer exercise any of the rights granted +to You by this License upon your failure to honor the conditions in Section +1(c). + +10) Termination for Patent Action. This License shall terminate automatically +and You may no longer exercise any of the rights granted to You by this +License as of the date You commence an action, including a cross-claim or +counterclaim, against Licensor or any licensee alleging that the Original Work +infringes a patent. This termination provision shall not apply for an action +alleging patent infringement by combinations of the Original Work with other +software or hardware. + +11) Jurisdiction, Venue and Governing Law. 
Any action or suit relating to this +License may be brought only in the courts of a jurisdiction wherein the +Licensor resides or in which Licensor conducts its primary business, and under +the laws of that jurisdiction excluding its conflict-of-law provisions. The +application of the United Nations Convention on Contracts for the +International Sale of Goods is expressly excluded. Any use of the Original +Work outside the scope of this License or after its termination shall be +subject to the requirements and penalties of copyright or patent law in the +appropriate jurisdiction. This section shall survive the termination of this +License. + +12) Attorneys' Fees. In any action to enforce the terms of this License or +seeking damages relating thereto, the prevailing party shall be entitled to +recover its costs and expenses, including, without limitation, reasonable +attorneys' fees and costs incurred in connection with such action, including +any appeal of such action. This section shall survive the termination of this +License. + +13) Miscellaneous. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent necessary +to make it enforceable. + +14) Definition of "You" in This License. "You" throughout this License, +whether in upper or lower case, means an individual or a legal entity +exercising rights under, and complying with all of the terms of, this License. +For legal entities, "You" includes any entity that controls, is controlled by, +or is under common control with you. For purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the direction or +management of such entity, whether by contract or otherwise, or (ii) ownership +of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial +ownership of such entity. + +15) Right to Use. You may use the Original Work in all ways not otherwise +restricted or conditioned by this License or by law, and Licensor promises not +to interfere with or be responsible for such uses by You. + +16) Modification of This License. This License is Copyright © 2005 Lawrence +Rosen. Permission is granted to copy, distribute, or communicate this License +without modification. Nothing in this License permits You to modify this +License as applied to the Original Work or to Derivative Works. However, You +may modify the text of this License and copy, distribute or communicate your +modified version (the "Modified License") and apply it to other original works +of authorship subject to the following conditions: (i) You may not indicate in +any way that your Modified License is the "Open Software License" or "OSL" and +you may not use those names in the name of your Modified License; (ii) You +must replace the notice specified in the first paragraph above with the notice +"Licensed under " or with a notice of your own +that is not confusingly similar to the notice in this License; and (iii) You +may not claim that your original works are open source software unless your +Modified License has been approved by Open Source Initiative (OSI) and You +comply with its license review and certification process. 
+''', + ), + ( + 'PostgreSQL', + '''\ +PostgreSQL License + +Copyright (c) [year], [fullname] + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose, without fee, and without a written agreement is +hereby granted, provided that the above copyright notice and this paragraph +and the following two paragraphs appear in all copies. + +IN NO EVENT SHALL [fullname] BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, +SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS, ARISING +OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF [fullname] +HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +[fullname] SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, +AND [fullname] HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, +ENHANCEMENTS, OR MODIFICATIONS. +''', + ), + ( + 'UPL-1.0', + '''\ +Copyright (c) [year] [fullname] + +The Universal Permissive License (UPL), Version 1.0 + +Subject to the condition set forth below, permission is hereby granted to any +person obtaining a copy of this software, associate documentation and/or data +(collectively the "Software"), free of charge and under any and all copyright +rights in the Software, and any and all patent rights owned or freely +licensable by each licensor hereunder covering either (i) the unmodified +Software as contributed to or provided by such licensor, or (ii) the Larger +Works (as defined below), to deal in both + +(a) the Software, and +(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if +one is included with the Software (each a “Larger Work” to which the Software +is contributed by such licensors), + +without restriction, including without limitation the rights to copy, create +derivative works of, display, perform, and distribute the Software and make, +use, sell, offer for sale, import, export, have made, and have sold the +Software and the Larger Work(s), and to sublicense the foregoing rights on +either these or other terms. + +This license is subject to the following condition: +The above copyright notice and either this complete permission notice or at +a minimum a reference to the UPL must be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +''', + ), + ( + 'Unlicense', + '''\ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. 
We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to +''', + ), + ( + 'WTFPL', + '''\ +DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + Version 2, December 2004 + + Copyright (C) 2004 Sam Hocevar + + Everyone is permitted to copy and distribute verbatim or modified + copies of this license document, and changing it is allowed as long + as the name is changed. + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. You just DO WHAT THE FUCK YOU WANT TO. +''', + ), + ( + 'Zlib', + '''\ +zlib License + +(C) [year] [fullname] + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. +''', + ), +) diff --git a/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/INSTALLER b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/METADATA b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/METADATA new file mode 100644 index 00000000..ffef2ff3 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/METADATA @@ -0,0 +1,84 @@ +Metadata-Version: 2.4 +Name: Jinja2 +Version: 3.1.6 +Summary: A very fast and expressive template engine. 
+Maintainer-email: Pallets +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Classifier: Typing :: Typed +License-File: LICENSE.txt +Requires-Dist: MarkupSafe>=2.0 +Requires-Dist: Babel>=2.7 ; extra == "i18n" +Project-URL: Changes, https://jinja.palletsprojects.com/changes/ +Project-URL: Chat, https://discord.gg/pallets +Project-URL: Documentation, https://jinja.palletsprojects.com/ +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Source, https://github.com/pallets/jinja/ +Provides-Extra: i18n + +# Jinja + +Jinja is a fast, expressive, extensible templating engine. Special +placeholders in the template allow writing code similar to Python +syntax. Then the template is passed data to render the final document. + +It includes: + +- Template inheritance and inclusion. +- Define and import macros within templates. +- HTML templates can use autoescaping to prevent XSS from untrusted + user input. +- A sandboxed environment can safely render untrusted templates. +- AsyncIO support for generating templates and calling async + functions. +- I18N support with Babel. +- Templates are compiled to optimized Python code just-in-time and + cached, or can be compiled ahead-of-time. +- Exceptions point to the correct line in templates to make debugging + easier. +- Extensible filters, tests, functions, and even syntax. + +Jinja's philosophy is that while application logic belongs in Python if +possible, it shouldn't make the template designer's job difficult by +restricting functionality too much. + + +## In A Nutshell + +```jinja +{% extends "base.html" %} +{% block title %}Members{% endblock %} +{% block content %} + +{% endblock %} +``` + +## Donate + +The Pallets organization develops and supports Jinja and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, [please +donate today][]. + +[please donate today]: https://palletsprojects.com/donate + +## Contributing + +See our [detailed contributing documentation][contrib] for many ways to +contribute, including reporting issues, requesting features, asking or answering +questions, and making PRs. 
+ +[contrib]: https://palletsprojects.com/contributing/ + diff --git a/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/RECORD b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/RECORD new file mode 100644 index 00000000..0ca35a74 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/RECORD @@ -0,0 +1,57 @@ +jinja2-3.1.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jinja2-3.1.6.dist-info/METADATA,sha256=aMVUj7Z8QTKhOJjZsx7FDGvqKr3ZFdkh8hQ1XDpkmcg,2871 +jinja2-3.1.6.dist-info/RECORD,, +jinja2-3.1.6.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82 +jinja2-3.1.6.dist-info/entry_points.txt,sha256=OL85gYU1eD8cuPlikifFngXpeBjaxl6rIJ8KkC_3r-I,58 +jinja2-3.1.6.dist-info/licenses/LICENSE.txt,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +jinja2/__init__.py,sha256=xxepO9i7DHsqkQrgBEduLtfoz2QCuT6_gbL4XSN1hbU,1928 +jinja2/__pycache__/__init__.cpython-312.pyc,, +jinja2/__pycache__/_identifier.cpython-312.pyc,, +jinja2/__pycache__/async_utils.cpython-312.pyc,, +jinja2/__pycache__/bccache.cpython-312.pyc,, +jinja2/__pycache__/compiler.cpython-312.pyc,, +jinja2/__pycache__/constants.cpython-312.pyc,, +jinja2/__pycache__/debug.cpython-312.pyc,, +jinja2/__pycache__/defaults.cpython-312.pyc,, +jinja2/__pycache__/environment.cpython-312.pyc,, +jinja2/__pycache__/exceptions.cpython-312.pyc,, +jinja2/__pycache__/ext.cpython-312.pyc,, +jinja2/__pycache__/filters.cpython-312.pyc,, +jinja2/__pycache__/idtracking.cpython-312.pyc,, +jinja2/__pycache__/lexer.cpython-312.pyc,, +jinja2/__pycache__/loaders.cpython-312.pyc,, +jinja2/__pycache__/meta.cpython-312.pyc,, +jinja2/__pycache__/nativetypes.cpython-312.pyc,, +jinja2/__pycache__/nodes.cpython-312.pyc,, +jinja2/__pycache__/optimizer.cpython-312.pyc,, +jinja2/__pycache__/parser.cpython-312.pyc,, +jinja2/__pycache__/runtime.cpython-312.pyc,, +jinja2/__pycache__/sandbox.cpython-312.pyc,, +jinja2/__pycache__/tests.cpython-312.pyc,, +jinja2/__pycache__/utils.cpython-312.pyc,, +jinja2/__pycache__/visitor.cpython-312.pyc,, +jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 +jinja2/async_utils.py,sha256=vK-PdsuorOMnWSnEkT3iUJRIkTnYgO2T6MnGxDgHI5o,2834 +jinja2/bccache.py,sha256=gh0qs9rulnXo0PhX5jTJy2UHzI8wFnQ63o_vw7nhzRg,14061 +jinja2/compiler.py,sha256=9RpCQl5X88BHllJiPsHPh295Hh0uApvwFJNQuutULeM,74131 +jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 +jinja2/debug.py,sha256=CnHqCDHd-BVGvti_8ZsTolnXNhA3ECsY-6n_2pwU8Hw,6297 +jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 +jinja2/environment.py,sha256=9nhrP7Ch-NbGX00wvyr4yy-uhNHq2OCc60ggGrni_fk,61513 +jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 +jinja2/ext.py,sha256=5PF5eHfh8mXAIxXHHRB2xXbXohi8pE3nHSOxa66uS7E,31875 +jinja2/filters.py,sha256=PQ_Egd9n9jSgtnGQYyF4K5j2nYwhUIulhPnyimkdr-k,55212 +jinja2/idtracking.py,sha256=-ll5lIp73pML3ErUYiIJj7tdmWxcH_IlDv3yA_hiZYo,10555 +jinja2/lexer.py,sha256=LYiYio6br-Tep9nPcupWXsPEtjluw3p1mU-lNBVRUfk,29786 +jinja2/loaders.py,sha256=wIrnxjvcbqh5VwW28NSkfotiDq8qNCxIOSFbGUiSLB4,24055 +jinja2/meta.py,sha256=OTDPkaFvU2Hgvx-6akz7154F8BIWaRmvJcBFvwopHww,4397 +jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210 +jinja2/nodes.py,sha256=m1Duzcr6qhZI8JQ6VyJgUNinjAf5bQzijSmDnMsvUx8,34579 +jinja2/optimizer.py,sha256=rJnCRlQ7pZsEEmMhsQDgC_pKyDHxP5TPS6zVPGsgcu8,1651 +jinja2/parser.py,sha256=lLOFy3sEmHc5IaEHRiH1sQVnId2moUQzhyeJZTtdY30,40383 
+jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jinja2/runtime.py,sha256=gDk-GvdriJXqgsGbHgrcKTP0Yp6zPXzhzrIpCFH3jAU,34249 +jinja2/sandbox.py,sha256=Mw2aitlY2I8la7FYhcX2YG9BtUYcLnD0Gh3d29cDWrY,15009 +jinja2/tests.py,sha256=VLsBhVFnWg-PxSBz1MhRnNWgP1ovXk3neO1FLQMeC9Q,5926 +jinja2/utils.py,sha256=rRp3o9e7ZKS4fyrWRbELyLcpuGVTFcnooaOa1qx_FIk,24129 +jinja2/visitor.py,sha256=EcnL1PIwf_4RVCOMxsRNuR8AXHbS1qfAdMOE2ngKJz4,3557 diff --git a/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/WHEEL b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/WHEEL new file mode 100644 index 00000000..23d2d7e9 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.11.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/entry_points.txt b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/entry_points.txt new file mode 100644 index 00000000..abc3eae3 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[babel.extractors] +jinja2=jinja2.ext:babel_extract[i18n] + diff --git a/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt new file mode 100644 index 00000000..c37cae49 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/Lib/site-packages/jinja2/__init__.py b/.venv/Lib/site-packages/jinja2/__init__.py new file mode 100644 index 00000000..1a423a3e --- /dev/null +++ b/.venv/Lib/site-packages/jinja2/__init__.py @@ -0,0 +1,38 @@ +"""Jinja is a template engine written in pure Python. It provides a +non-XML syntax that supports inline expressions and an optional +sandboxed environment. 
+""" + +from .bccache import BytecodeCache as BytecodeCache +from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache +from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache +from .environment import Environment as Environment +from .environment import Template as Template +from .exceptions import TemplateAssertionError as TemplateAssertionError +from .exceptions import TemplateError as TemplateError +from .exceptions import TemplateNotFound as TemplateNotFound +from .exceptions import TemplateRuntimeError as TemplateRuntimeError +from .exceptions import TemplatesNotFound as TemplatesNotFound +from .exceptions import TemplateSyntaxError as TemplateSyntaxError +from .exceptions import UndefinedError as UndefinedError +from .loaders import BaseLoader as BaseLoader +from .loaders import ChoiceLoader as ChoiceLoader +from .loaders import DictLoader as DictLoader +from .loaders import FileSystemLoader as FileSystemLoader +from .loaders import FunctionLoader as FunctionLoader +from .loaders import ModuleLoader as ModuleLoader +from .loaders import PackageLoader as PackageLoader +from .loaders import PrefixLoader as PrefixLoader +from .runtime import ChainableUndefined as ChainableUndefined +from .runtime import DebugUndefined as DebugUndefined +from .runtime import make_logging_undefined as make_logging_undefined +from .runtime import StrictUndefined as StrictUndefined +from .runtime import Undefined as Undefined +from .utils import clear_caches as clear_caches +from .utils import is_undefined as is_undefined +from .utils import pass_context as pass_context +from .utils import pass_environment as pass_environment +from .utils import pass_eval_context as pass_eval_context +from .utils import select_autoescape as select_autoescape + +__version__ = "3.1.6" diff --git a/.venv/Lib/site-packages/jinja2/_identifier.py b/.venv/Lib/site-packages/jinja2/_identifier.py new file mode 100644 index 00000000..928c1503 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2/_identifier.py @@ -0,0 +1,6 @@ +import re + +# generated by scripts/generate_identifier_pattern.py +pattern = re.compile( + r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 +) diff --git a/.venv/Lib/site-packages/jinja2/async_utils.py b/.venv/Lib/site-packages/jinja2/async_utils.py new file mode 100644 index 00000000..f0c14020 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2/async_utils.py @@ -0,0 +1,99 @@ +import inspect +import typing as t +from functools import WRAPPER_ASSIGNMENTS +from functools import wraps + +from .utils import _PassArg +from .utils import pass_eval_context + +if t.TYPE_CHECKING: + import typing_extensions as te + +V = t.TypeVar("V") + + +def async_variant(normal_func): # type: ignore + def decorator(async_func): # type: ignore + pass_arg = _PassArg.from_obj(normal_func) + need_eval_context = pass_arg is None + + if pass_arg is 
_PassArg.environment: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].is_async) + + else: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].environment.is_async) + + # Take the doc and annotations from the sync function, but the + # name from the async function. Pallets-Sphinx-Themes + # build_function_directive expects __wrapped__ to point to the + # sync function. + async_func_attrs = ("__module__", "__name__", "__qualname__") + normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) + + @wraps(normal_func, assigned=normal_func_attrs) + @wraps(async_func, assigned=async_func_attrs, updated=()) + def wrapper(*args, **kwargs): # type: ignore + b = is_async(args) + + if need_eval_context: + args = args[1:] + + if b: + return async_func(*args, **kwargs) + + return normal_func(*args, **kwargs) + + if need_eval_context: + wrapper = pass_eval_context(wrapper) + + wrapper.jinja_async_variant = True # type: ignore[attr-defined] + return wrapper + + return decorator + + +_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} + + +async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": + # Avoid a costly call to isawaitable + if type(value) in _common_primitives: + return t.cast("V", value) + + if inspect.isawaitable(value): + return await t.cast("t.Awaitable[V]", value) + + return value + + +class _IteratorToAsyncIterator(t.Generic[V]): + def __init__(self, iterator: "t.Iterator[V]"): + self._iterator = iterator + + def __aiter__(self) -> "te.Self": + return self + + async def __anext__(self) -> V: + try: + return next(self._iterator) + except StopIteration as e: + raise StopAsyncIteration(e.value) from e + + +def auto_aiter( + iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> "t.AsyncIterator[V]": + if hasattr(iterable, "__aiter__"): + return iterable.__aiter__() + else: + return _IteratorToAsyncIterator(iter(iterable)) + + +async def auto_to_list( + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> t.List["V"]: + return [x async for x in auto_aiter(value)] diff --git a/.venv/Lib/site-packages/jinja2/bccache.py b/.venv/Lib/site-packages/jinja2/bccache.py new file mode 100644 index 00000000..ada8b099 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2/bccache.py @@ -0,0 +1,408 @@ +"""The optional bytecode cache system. This is useful if you have very +complex template situations and the compilation of all those templates +slows down your application too much. + +Situations where this is useful are often forking web applications that +are initialized on the first request. +""" + +import errno +import fnmatch +import marshal +import os +import pickle +import stat +import sys +import tempfile +import typing as t +from hashlib import sha1 +from io import BytesIO +from types import CodeType + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .environment import Environment + + class _MemcachedClient(te.Protocol): + def get(self, key: str) -> bytes: ... + + def set( + self, key: str, value: bytes, timeout: t.Optional[int] = None + ) -> None: ... + + +bc_version = 5 +# Magic bytes to identify Jinja bytecode cache files. Contains the +# Python major and minor version to avoid loading incompatible bytecode +# if a project upgrades its Python version. +bc_magic = ( + b"j2" + + pickle.dumps(bc_version, 2) + + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) +) + + +class Bucket: + """Buckets are used to store the bytecode for one template. 
It's created + and initialized by the bytecode cache and passed to the loading functions. + + The buckets get an internal checksum from the cache assigned and use this + to automatically reject outdated cache material. Individual bytecode + cache subclasses don't have to care about cache invalidation. + """ + + def __init__(self, environment: "Environment", key: str, checksum: str) -> None: + self.environment = environment + self.key = key + self.checksum = checksum + self.reset() + + def reset(self) -> None: + """Resets the bucket (unloads the bytecode).""" + self.code: t.Optional[CodeType] = None + + def load_bytecode(self, f: t.BinaryIO) -> None: + """Loads bytecode from a file or file like object.""" + # make sure the magic header is correct + magic = f.read(len(bc_magic)) + if magic != bc_magic: + self.reset() + return + # the source code of the file changed, we need to reload + checksum = pickle.load(f) + if self.checksum != checksum: + self.reset() + return + # if marshal_load fails then we need to reload + try: + self.code = marshal.load(f) + except (EOFError, ValueError, TypeError): + self.reset() + return + + def write_bytecode(self, f: t.IO[bytes]) -> None: + """Dump the bytecode into the file or file like object passed.""" + if self.code is None: + raise TypeError("can't write empty bucket") + f.write(bc_magic) + pickle.dump(self.checksum, f, 2) + marshal.dump(self.code, f) + + def bytecode_from_string(self, string: bytes) -> None: + """Load bytecode from bytes.""" + self.load_bytecode(BytesIO(string)) + + def bytecode_to_string(self) -> bytes: + """Return the bytecode as bytes.""" + out = BytesIO() + self.write_bytecode(out) + return out.getvalue() + + +class BytecodeCache: + """To implement your own bytecode cache you have to subclass this class + and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of + these methods are passed a :class:`~jinja2.bccache.Bucket`. + + A very basic bytecode cache that saves the bytecode on the file system:: + + from os import path + + class MyCache(BytecodeCache): + + def __init__(self, directory): + self.directory = directory + + def load_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + if path.exists(filename): + with open(filename, 'rb') as f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + with open(filename, 'wb') as f: + bucket.write_bytecode(f) + + A more advanced version of a filesystem based bytecode cache is part of + Jinja. + """ + + def load_bytecode(self, bucket: Bucket) -> None: + """Subclasses have to override this method to load bytecode into a + bucket. If they are not able to find code in the cache for the + bucket, it must not do anything. + """ + raise NotImplementedError() + + def dump_bytecode(self, bucket: Bucket) -> None: + """Subclasses have to override this method to write the bytecode + from a bucket back to the cache. If it unable to do so it must not + fail silently but raise an exception. + """ + raise NotImplementedError() + + def clear(self) -> None: + """Clears the cache. This method is not used by Jinja but should be + implemented to allow applications to clear the bytecode cache used + by a particular environment. 
+ """ + + def get_cache_key( + self, name: str, filename: t.Optional[t.Union[str]] = None + ) -> str: + """Returns the unique hash key for this template name.""" + hash = sha1(name.encode("utf-8")) + + if filename is not None: + hash.update(f"|{filename}".encode()) + + return hash.hexdigest() + + def get_source_checksum(self, source: str) -> str: + """Returns a checksum for the source.""" + return sha1(source.encode("utf-8")).hexdigest() + + def get_bucket( + self, + environment: "Environment", + name: str, + filename: t.Optional[str], + source: str, + ) -> Bucket: + """Return a cache bucket for the given template. All arguments are + mandatory but filename may be `None`. + """ + key = self.get_cache_key(name, filename) + checksum = self.get_source_checksum(source) + bucket = Bucket(environment, key, checksum) + self.load_bytecode(bucket) + return bucket + + def set_bucket(self, bucket: Bucket) -> None: + """Put the bucket into the cache.""" + self.dump_bytecode(bucket) + + +class FileSystemBytecodeCache(BytecodeCache): + """A bytecode cache that stores bytecode on the filesystem. It accepts + two arguments: The directory where the cache items are stored and a + pattern string that is used to build the filename. + + If no directory is specified a default cache directory is selected. On + Windows the user's temp directory is used, on UNIX systems a directory + is created for the user in the system temp directory. + + The pattern can be used to have multiple separate caches operate on the + same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` + is replaced with the cache key. + + >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') + + This bytecode cache supports clearing of the cache using the clear method. + """ + + def __init__( + self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" + ) -> None: + if directory is None: + directory = self._get_default_cache_dir() + self.directory = directory + self.pattern = pattern + + def _get_default_cache_dir(self) -> str: + def _unsafe_dir() -> "te.NoReturn": + raise RuntimeError( + "Cannot determine safe temp directory. You " + "need to explicitly provide one." + ) + + tmpdir = tempfile.gettempdir() + + # On windows the temporary directory is used specific unless + # explicitly forced otherwise. We can just use that. + if os.name == "nt": + return tmpdir + if not hasattr(os, "getuid"): + _unsafe_dir() + + dirname = f"_jinja2-cache-{os.getuid()}" + actual_dir = os.path.join(tmpdir, dirname) + + try: + os.mkdir(actual_dir, stat.S_IRWXU) + except OSError as e: + if e.errno != errno.EEXIST: + raise + try: + os.chmod(actual_dir, stat.S_IRWXU) + actual_dir_stat = os.lstat(actual_dir) + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): + _unsafe_dir() + except OSError as e: + if e.errno != errno.EEXIST: + raise + + actual_dir_stat = os.lstat(actual_dir) + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): + _unsafe_dir() + + return actual_dir + + def _get_cache_filename(self, bucket: Bucket) -> str: + return os.path.join(self.directory, self.pattern % (bucket.key,)) + + def load_bytecode(self, bucket: Bucket) -> None: + filename = self._get_cache_filename(bucket) + + # Don't test for existence before opening the file, since the + # file could disappear after the test before the open. 
+ try: + f = open(filename, "rb") + except (FileNotFoundError, IsADirectoryError, PermissionError): + # PermissionError can occur on Windows when an operation is + # in progress, such as calling clear(). + return + + with f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket: Bucket) -> None: + # Write to a temporary file, then rename to the real name after + # writing. This avoids another process reading the file before + # it is fully written. + name = self._get_cache_filename(bucket) + f = tempfile.NamedTemporaryFile( + mode="wb", + dir=os.path.dirname(name), + prefix=os.path.basename(name), + suffix=".tmp", + delete=False, + ) + + def remove_silent() -> None: + try: + os.remove(f.name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + pass + + try: + with f: + bucket.write_bytecode(f) + except BaseException: + remove_silent() + raise + + try: + os.replace(f.name, name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + remove_silent() + except BaseException: + remove_silent() + raise + + def clear(self) -> None: + # imported lazily here because google app-engine doesn't support + # write access on the file system and the function does not exist + # normally. + from os import remove + + files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) + for filename in files: + try: + remove(os.path.join(self.directory, filename)) + except OSError: + pass + + +class MemcachedBytecodeCache(BytecodeCache): + """This class implements a bytecode cache that uses a memcache cache for + storing the information. It does not enforce a specific memcache library + (tummy's memcache or cmemcache) but will accept any class that provides + the minimal interface required. + + Libraries compatible with this class: + + - `cachelib `_ + - `python-memcached `_ + + (Unfortunately the django cache interface is not compatible because it + does not support storing binary data, only text. You can however pass + the underlying cache client to the bytecode cache which is available + as `django.core.cache.cache._client`.) + + The minimal interface for the client passed to the constructor is this: + + .. class:: MinimalClientInterface + + .. method:: set(key, value[, timeout]) + + Stores the bytecode in the cache. `value` is a string and + `timeout` the timeout of the key. If timeout is not provided + a default timeout or no timeout should be assumed, if it's + provided it's an integer with the number of seconds the cache + item should exist. + + .. method:: get(key) + + Returns the value for the cache key. If the item does not + exist in the cache the return value must be `None`. + + The other arguments to the constructor are the prefix for all keys that + is added before the actual cache key and the timeout for the bytecode in + the cache system. We recommend a high (or no) timeout. + + This bytecode cache does not support clearing of used items in the cache. + The clear method is a no-operation function. + + .. versionadded:: 2.7 + Added support for ignoring memcache errors through the + `ignore_memcache_errors` parameter. 
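+
+    A minimal, illustrative setup (assuming `python-memcached` is installed;
+    any client exposing ``get`` and ``set`` as described above works the
+    same way)::
+
+        import memcache
+        from jinja2 import Environment
+
+        client = memcache.Client(["127.0.0.1:11211"])
+        env = Environment(
+            bytecode_cache=MemcachedBytecodeCache(client)
+        )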
+ """ + + def __init__( + self, + client: "_MemcachedClient", + prefix: str = "jinja2/bytecode/", + timeout: t.Optional[int] = None, + ignore_memcache_errors: bool = True, + ): + self.client = client + self.prefix = prefix + self.timeout = timeout + self.ignore_memcache_errors = ignore_memcache_errors + + def load_bytecode(self, bucket: Bucket) -> None: + try: + code = self.client.get(self.prefix + bucket.key) + except Exception: + if not self.ignore_memcache_errors: + raise + else: + bucket.bytecode_from_string(code) + + def dump_bytecode(self, bucket: Bucket) -> None: + key = self.prefix + bucket.key + value = bucket.bytecode_to_string() + + try: + if self.timeout is not None: + self.client.set(key, value, self.timeout) + else: + self.client.set(key, value) + except Exception: + if not self.ignore_memcache_errors: + raise diff --git a/.venv/Lib/site-packages/jinja2/compiler.py b/.venv/Lib/site-packages/jinja2/compiler.py new file mode 100644 index 00000000..a4ff6a1b --- /dev/null +++ b/.venv/Lib/site-packages/jinja2/compiler.py @@ -0,0 +1,1998 @@ +"""Compiles nodes from the parser into Python code.""" + +import typing as t +from contextlib import contextmanager +from functools import update_wrapper +from io import StringIO +from itertools import chain +from keyword import iskeyword as is_python_keyword + +from markupsafe import escape +from markupsafe import Markup + +from . import nodes +from .exceptions import TemplateAssertionError +from .idtracking import Symbols +from .idtracking import VAR_LOAD_ALIAS +from .idtracking import VAR_LOAD_PARAMETER +from .idtracking import VAR_LOAD_RESOLVE +from .idtracking import VAR_LOAD_UNDEFINED +from .nodes import EvalContext +from .optimizer import Optimizer +from .utils import _PassArg +from .utils import concat +from .visitor import NodeVisitor + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .environment import Environment + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + +operators = { + "eq": "==", + "ne": "!=", + "gt": ">", + "gteq": ">=", + "lt": "<", + "lteq": "<=", + "in": "in", + "notin": "not in", +} + + +def optimizeconst(f: F) -> F: + def new_func( + self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any + ) -> t.Any: + # Only optimize if the frame is not volatile + if self.optimizer is not None and not frame.eval_ctx.volatile: + new_node = self.optimizer.visit(node, frame.eval_ctx) + + if new_node != node: + return self.visit(new_node, frame) + + return f(self, node, frame, **kwargs) + + return update_wrapper(new_func, f) # type: ignore[return-value] + + +def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed and op in self.environment.intercepted_binops # type: ignore + ): + self.write(f"environment.call_binop(context, {op!r}, ") + self.visit(node.left, frame) + self.write(", ") + self.visit(node.right, frame) + else: + self.write("(") + self.visit(node.left, frame) + self.write(f" {op} ") + self.visit(node.right, frame) + + self.write(")") + + return visitor + + +def _make_unop( + op: str, +) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed and op in self.environment.intercepted_unops # type: ignore + ): + self.write(f"environment.call_unop(context, {op!r}, ") + 
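+            # the operand expression follows as the final argument to
+            # call_unop; both branches share the closing ")" written below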
self.visit(node.node, frame) + else: + self.write("(" + op) + self.visit(node.node, frame) + + self.write(")") + + return visitor + + +def generate( + node: nodes.Template, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, +) -> t.Optional[str]: + """Generate the python source for a node tree.""" + if not isinstance(node, nodes.Template): + raise TypeError("Can't compile non template nodes") + + generator = environment.code_generator_class( + environment, name, filename, stream, defer_init, optimized + ) + generator.visit(node) + + if stream is None: + return generator.stream.getvalue() # type: ignore + + return None + + +def has_safe_repr(value: t.Any) -> bool: + """Does the node have a safe representation?""" + if value is None or value is NotImplemented or value is Ellipsis: + return True + + if type(value) in {bool, int, float, complex, range, str, Markup}: + return True + + if type(value) in {tuple, list, set, frozenset}: + return all(has_safe_repr(v) for v in value) + + if type(value) is dict: # noqa E721 + return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) + + return False + + +def find_undeclared( + nodes: t.Iterable[nodes.Node], names: t.Iterable[str] +) -> t.Set[str]: + """Check if the names passed are accessed undeclared. The return value + is a set of all the undeclared names from the sequence of names found. + """ + visitor = UndeclaredNameVisitor(names) + try: + for node in nodes: + visitor.visit(node) + except VisitorExit: + pass + return visitor.undeclared + + +class MacroRef: + def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: + self.node = node + self.accesses_caller = False + self.accesses_kwargs = False + self.accesses_varargs = False + + +class Frame: + """Holds compile time information for us.""" + + def __init__( + self, + eval_ctx: EvalContext, + parent: t.Optional["Frame"] = None, + level: t.Optional[int] = None, + ) -> None: + self.eval_ctx = eval_ctx + + # the parent of this frame + self.parent = parent + + if parent is None: + self.symbols = Symbols(level=level) + + # in some dynamic inheritance situations the compiler needs to add + # write tests around output statements. + self.require_output_check = False + + # inside some tags we are using a buffer rather than yield statements. + # this for example affects {% filter %} or {% macro %}. If a frame + # is buffered this variable points to the name of the list used as + # buffer. + self.buffer: t.Optional[str] = None + + # the name of the block we're in, otherwise None. + self.block: t.Optional[str] = None + + else: + self.symbols = Symbols(parent.symbols, level=level) + self.require_output_check = parent.require_output_check + self.buffer = parent.buffer + self.block = parent.block + + # a toplevel frame is the root + soft frames such as if conditions. + self.toplevel = False + + # the root frame is basically just the outermost frame, so no if + # conditions. This information is used to optimize inheritance + # situations. + self.rootlevel = False + + # variables set inside of loops and blocks should not affect outer frames, + # but they still needs to be kept track of as part of the active context. + self.loop_frame = False + self.block_frame = False + + # track whether the frame is being used in an if-statement or conditional + # expression as it determines which errors should be raised during runtime + # or compile time. 
+ self.soft_frame = False + + def copy(self) -> "te.Self": + """Create a copy of the current one.""" + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.symbols = self.symbols.copy() + return rv + + def inner(self, isolated: bool = False) -> "Frame": + """Return an inner frame.""" + if isolated: + return Frame(self.eval_ctx, level=self.symbols.level + 1) + return Frame(self.eval_ctx, self) + + def soft(self) -> "te.Self": + """Return a soft frame. A soft frame may not be modified as + standalone thing as it shares the resources with the frame it + was created of, but it's not a rootlevel frame any longer. + + This is only used to implement if-statements and conditional + expressions. + """ + rv = self.copy() + rv.rootlevel = False + rv.soft_frame = True + return rv + + __copy__ = copy + + +class VisitorExit(RuntimeError): + """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" + + +class DependencyFinderVisitor(NodeVisitor): + """A visitor that collects filter and test calls.""" + + def __init__(self) -> None: + self.filters: t.Set[str] = set() + self.tests: t.Set[str] = set() + + def visit_Filter(self, node: nodes.Filter) -> None: + self.generic_visit(node) + self.filters.add(node.name) + + def visit_Test(self, node: nodes.Test) -> None: + self.generic_visit(node) + self.tests.add(node.name) + + def visit_Block(self, node: nodes.Block) -> None: + """Stop visiting at blocks.""" + + +class UndeclaredNameVisitor(NodeVisitor): + """A visitor that checks if a name is accessed without being + declared. This is different from the frame visitor as it will + not stop at closure frames. + """ + + def __init__(self, names: t.Iterable[str]) -> None: + self.names = set(names) + self.undeclared: t.Set[str] = set() + + def visit_Name(self, node: nodes.Name) -> None: + if node.ctx == "load" and node.name in self.names: + self.undeclared.add(node.name) + if self.undeclared == self.names: + raise VisitorExit() + else: + self.names.discard(node.name) + + def visit_Block(self, node: nodes.Block) -> None: + """Stop visiting a blocks.""" + + +class CompilerExit(Exception): + """Raised if the compiler encountered a situation where it just + doesn't make sense to further process the code. Any block that + raises such an exception is not further processed. + """ + + +class CodeGenerator(NodeVisitor): + def __init__( + self, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, + ) -> None: + if stream is None: + stream = StringIO() + self.environment = environment + self.name = name + self.filename = filename + self.stream = stream + self.created_block_context = False + self.defer_init = defer_init + self.optimizer: t.Optional[Optimizer] = None + + if optimized: + self.optimizer = Optimizer(environment) + + # aliases for imports + self.import_aliases: t.Dict[str, str] = {} + + # a registry for all blocks. Because blocks are moved out + # into the global python scope they are registered here + self.blocks: t.Dict[str, nodes.Block] = {} + + # the number of extends statements so far + self.extends_so_far = 0 + + # some templates have a rootlevel extends. In this case we + # can safely assume that we're a child template and do some + # more optimizations. 
+ self.has_known_extends = False + + # the current line number + self.code_lineno = 1 + + # registry of all filters and tests (global, not block local) + self.tests: t.Dict[str, str] = {} + self.filters: t.Dict[str, str] = {} + + # the debug information + self.debug_info: t.List[t.Tuple[int, int]] = [] + self._write_debug_info: t.Optional[int] = None + + # the number of new lines before the next write() + self._new_lines = 0 + + # the line number of the last written statement + self._last_line = 0 + + # true if nothing was written so far. + self._first_write = True + + # used by the `temporary_identifier` method to get new + # unique, temporary identifier + self._last_identifier = 0 + + # the current indentation + self._indentation = 0 + + # Tracks toplevel assignments + self._assign_stack: t.List[t.Set[str]] = [] + + # Tracks parameter definition blocks + self._param_def_block: t.List[t.Set[str]] = [] + + # Tracks the current context. + self._context_reference_stack = ["context"] + + @property + def optimized(self) -> bool: + return self.optimizer is not None + + # -- Various compilation helpers + + def fail(self, msg: str, lineno: int) -> "te.NoReturn": + """Fail with a :exc:`TemplateAssertionError`.""" + raise TemplateAssertionError(msg, lineno, self.name, self.filename) + + def temporary_identifier(self) -> str: + """Get a new unique identifier.""" + self._last_identifier += 1 + return f"t_{self._last_identifier}" + + def buffer(self, frame: Frame) -> None: + """Enable buffering for the frame from that point onwards.""" + frame.buffer = self.temporary_identifier() + self.writeline(f"{frame.buffer} = []") + + def return_buffer_contents( + self, frame: Frame, force_unescaped: bool = False + ) -> None: + """Return the buffer contents of the frame.""" + if not force_unescaped: + if frame.eval_ctx.volatile: + self.writeline("if context.eval_ctx.autoescape:") + self.indent() + self.writeline(f"return Markup(concat({frame.buffer}))") + self.outdent() + self.writeline("else:") + self.indent() + self.writeline(f"return concat({frame.buffer})") + self.outdent() + return + elif frame.eval_ctx.autoescape: + self.writeline(f"return Markup(concat({frame.buffer}))") + return + self.writeline(f"return concat({frame.buffer})") + + def indent(self) -> None: + """Indent by one.""" + self._indentation += 1 + + def outdent(self, step: int = 1) -> None: + """Outdent by step.""" + self._indentation -= step + + def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: + """Yield or write into the frame buffer.""" + if frame.buffer is None: + self.writeline("yield ", node) + else: + self.writeline(f"{frame.buffer}.append(", node) + + def end_write(self, frame: Frame) -> None: + """End the writing process started by `start_write`.""" + if frame.buffer is not None: + self.write(")") + + def simple_write( + self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None + ) -> None: + """Simple shortcut for start_write + write + end_write.""" + self.start_write(frame, node) + self.write(s) + self.end_write(frame) + + def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: + """Visit a list of nodes as block in a frame. If the current frame + is no buffer a dummy ``if 0: yield None`` is written automatically. 
+ """ + try: + self.writeline("pass") + for node in nodes: + self.visit(node, frame) + except CompilerExit: + pass + + def write(self, x: str) -> None: + """Write a string into the output stream.""" + if self._new_lines: + if not self._first_write: + self.stream.write("\n" * self._new_lines) + self.code_lineno += self._new_lines + if self._write_debug_info is not None: + self.debug_info.append((self._write_debug_info, self.code_lineno)) + self._write_debug_info = None + self._first_write = False + self.stream.write(" " * self._indentation) + self._new_lines = 0 + self.stream.write(x) + + def writeline( + self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 + ) -> None: + """Combination of newline and write.""" + self.newline(node, extra) + self.write(x) + + def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: + """Add one or more newlines before the next write.""" + self._new_lines = max(self._new_lines, 1 + extra) + if node is not None and node.lineno != self._last_line: + self._write_debug_info = node.lineno + self._last_line = node.lineno + + def signature( + self, + node: t.Union[nodes.Call, nodes.Filter, nodes.Test], + frame: Frame, + extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> None: + """Writes a function call to the stream for the current node. + A leading comma is added automatically. The extra keyword + arguments may not include python keywords otherwise a syntax + error could occur. The extra keyword arguments should be given + as python dict. + """ + # if any of the given keyword arguments is a python keyword + # we have to make sure that no invalid call is created. + kwarg_workaround = any( + is_python_keyword(t.cast(str, k)) + for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) + ) + + for arg in node.args: + self.write(", ") + self.visit(arg, frame) + + if not kwarg_workaround: + for kwarg in node.kwargs: + self.write(", ") + self.visit(kwarg, frame) + if extra_kwargs is not None: + for key, value in extra_kwargs.items(): + self.write(f", {key}={value}") + if node.dyn_args: + self.write(", *") + self.visit(node.dyn_args, frame) + + if kwarg_workaround: + if node.dyn_kwargs is not None: + self.write(", **dict({") + else: + self.write(", **{") + for kwarg in node.kwargs: + self.write(f"{kwarg.key!r}: ") + self.visit(kwarg.value, frame) + self.write(", ") + if extra_kwargs is not None: + for key, value in extra_kwargs.items(): + self.write(f"{key!r}: {value}, ") + if node.dyn_kwargs is not None: + self.write("}, **") + self.visit(node.dyn_kwargs, frame) + self.write(")") + else: + self.write("}") + + elif node.dyn_kwargs is not None: + self.write(", **") + self.visit(node.dyn_kwargs, frame) + + def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: + """Find all filter and test names used in the template and + assign them to variables in the compiled namespace. Checking + that the names are registered with the environment is done when + compiling the Filter and Test nodes. If the node is in an If or + CondExpr node, the check is done at runtime instead. + + .. versionchanged:: 3.0 + Filters and tests in If and CondExpr nodes are checked at + runtime instead of compile time. 
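+
+        As a rough illustration, a template that uses ``{{ x|upper }}``
+        causes module code along these lines to be emitted (the variable
+        name is generated)::
+
+            try:
+                t_1 = environment.filters['upper']
+            except KeyError:
+                @internalcode
+                def t_1(*unused):
+                    raise TemplateRuntimeError("No filter named 'upper' found.")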
+ """ + visitor = DependencyFinderVisitor() + + for node in nodes: + visitor.visit(node) + + for id_map, names, dependency in ( + (self.filters, visitor.filters, "filters"), + ( + self.tests, + visitor.tests, + "tests", + ), + ): + for name in sorted(names): + if name not in id_map: + id_map[name] = self.temporary_identifier() + + # add check during runtime that dependencies used inside of executed + # blocks are defined, as this step may be skipped during compile time + self.writeline("try:") + self.indent() + self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") + self.outdent() + self.writeline("except KeyError:") + self.indent() + self.writeline("@internalcode") + self.writeline(f"def {id_map[name]}(*unused):") + self.indent() + self.writeline( + f'raise TemplateRuntimeError("No {dependency[:-1]}' + f' named {name!r} found.")' + ) + self.outdent() + self.outdent() + + def enter_frame(self, frame: Frame) -> None: + undefs = [] + for target, (action, param) in frame.symbols.loads.items(): + if action == VAR_LOAD_PARAMETER: + pass + elif action == VAR_LOAD_RESOLVE: + self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") + elif action == VAR_LOAD_ALIAS: + self.writeline(f"{target} = {param}") + elif action == VAR_LOAD_UNDEFINED: + undefs.append(target) + else: + raise NotImplementedError("unknown load instruction") + if undefs: + self.writeline(f"{' = '.join(undefs)} = missing") + + def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: + if not with_python_scope: + undefs = [] + for target in frame.symbols.loads: + undefs.append(target) + if undefs: + self.writeline(f"{' = '.join(undefs)} = missing") + + def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: + return async_value if self.environment.is_async else sync_value + + def func(self, name: str) -> str: + return f"{self.choose_async()}def {name}" + + def macro_body( + self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame + ) -> t.Tuple[Frame, MacroRef]: + """Dump the function def of a macro or call block.""" + frame = frame.inner() + frame.symbols.analyze_node(node) + macro_ref = MacroRef(node) + + explicit_caller = None + skip_special_params = set() + args = [] + + for idx, arg in enumerate(node.args): + if arg.name == "caller": + explicit_caller = idx + if arg.name in ("kwargs", "varargs"): + skip_special_params.add(arg.name) + args.append(frame.symbols.ref(arg.name)) + + undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) + + if "caller" in undeclared: + # In older Jinja versions there was a bug that allowed caller + # to retain the special behavior even if it was mentioned in + # the argument list. However thankfully this was only really + # working if it was the last argument. So we are explicitly + # checking this now and error out if it is anywhere else in + # the argument list. 
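+            # i.e. an explicit ``caller`` parameter is only accepted when it
+            # has a default value; the IndexError probe below enforces that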
+ if explicit_caller is not None: + try: + node.defaults[explicit_caller - len(node.args)] + except IndexError: + self.fail( + "When defining macros or call blocks the " + 'special "caller" argument must be omitted ' + "or be given a default.", + node.lineno, + ) + else: + args.append(frame.symbols.declare_parameter("caller")) + macro_ref.accesses_caller = True + if "kwargs" in undeclared and "kwargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("kwargs")) + macro_ref.accesses_kwargs = True + if "varargs" in undeclared and "varargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("varargs")) + macro_ref.accesses_varargs = True + + # macros are delayed, they never require output checks + frame.require_output_check = False + frame.symbols.analyze_node(node) + self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) + self.indent() + + self.buffer(frame) + self.enter_frame(frame) + + self.push_parameter_definitions(frame) + for idx, arg in enumerate(node.args): + ref = frame.symbols.ref(arg.name) + self.writeline(f"if {ref} is missing:") + self.indent() + try: + default = node.defaults[idx - len(node.args)] + except IndexError: + self.writeline( + f'{ref} = undefined("parameter {arg.name!r} was not provided",' + f" name={arg.name!r})" + ) + else: + self.writeline(f"{ref} = ") + self.visit(default, frame) + self.mark_parameter_stored(ref) + self.outdent() + self.pop_parameter_definitions() + + self.blockvisit(node.body, frame) + self.return_buffer_contents(frame, force_unescaped=True) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + return frame, macro_ref + + def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: + """Dump the macro definition for the def created by macro_body.""" + arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) + name = getattr(macro_ref.node, "name", None) + if len(macro_ref.node.args) == 1: + arg_tuple += "," + self.write( + f"Macro(environment, macro, {name!r}, ({arg_tuple})," + f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," + f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" + ) + + def position(self, node: nodes.Node) -> str: + """Return a human readable position for the node.""" + rv = f"line {node.lineno}" + if self.name is not None: + rv = f"{rv} in {self.name!r}" + return rv + + def dump_local_context(self, frame: Frame) -> str: + items_kv = ", ".join( + f"{name!r}: {target}" + for name, target in frame.symbols.dump_stores().items() + ) + return f"{{{items_kv}}}" + + def write_commons(self) -> None: + """Writes a common preamble that is used by root and block functions. + Primarily this sets up common local helpers and enforces a generator + through a dead branch. + """ + self.writeline("resolve = context.resolve_or_missing") + self.writeline("undefined = environment.undefined") + self.writeline("concat = environment.concat") + # always use the standard Undefined class for the implicit else of + # conditional expressions + self.writeline("cond_expr_undefined = Undefined") + self.writeline("if 0: yield None") + + def push_parameter_definitions(self, frame: Frame) -> None: + """Pushes all parameter targets from the given frame into a local + stack that permits tracking of yet to be assigned parameters. In + particular this enables the optimization from `visit_Name` to skip + undefined expressions for parameters in macros as macros can reference + otherwise unbound parameters. 
+ """ + self._param_def_block.append(frame.symbols.dump_param_targets()) + + def pop_parameter_definitions(self) -> None: + """Pops the current parameter definitions set.""" + self._param_def_block.pop() + + def mark_parameter_stored(self, target: str) -> None: + """Marks a parameter in the current parameter definitions as stored. + This will skip the enforced undefined checks. + """ + if self._param_def_block: + self._param_def_block[-1].discard(target) + + def push_context_reference(self, target: str) -> None: + self._context_reference_stack.append(target) + + def pop_context_reference(self) -> None: + self._context_reference_stack.pop() + + def get_context_ref(self) -> str: + return self._context_reference_stack[-1] + + def get_resolve_func(self) -> str: + target = self._context_reference_stack[-1] + if target == "context": + return "resolve" + return f"{target}.resolve" + + def derive_context(self, frame: Frame) -> str: + return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" + + def parameter_is_undeclared(self, target: str) -> bool: + """Checks if a given target is an undeclared parameter.""" + if not self._param_def_block: + return False + return target in self._param_def_block[-1] + + def push_assign_tracking(self) -> None: + """Pushes a new layer for assignment tracking.""" + self._assign_stack.append(set()) + + def pop_assign_tracking(self, frame: Frame) -> None: + """Pops the topmost level for assignment tracking and updates the + context variables if necessary. + """ + vars = self._assign_stack.pop() + if ( + not frame.block_frame + and not frame.loop_frame + and not frame.toplevel + or not vars + ): + return + public_names = [x for x in vars if x[:1] != "_"] + if len(vars) == 1: + name = next(iter(vars)) + ref = frame.symbols.ref(name) + if frame.loop_frame: + self.writeline(f"_loop_vars[{name!r}] = {ref}") + return + if frame.block_frame: + self.writeline(f"_block_vars[{name!r}] = {ref}") + return + self.writeline(f"context.vars[{name!r}] = {ref}") + else: + if frame.loop_frame: + self.writeline("_loop_vars.update({") + elif frame.block_frame: + self.writeline("_block_vars.update({") + else: + self.writeline("context.vars.update({") + for idx, name in enumerate(sorted(vars)): + if idx: + self.write(", ") + ref = frame.symbols.ref(name) + self.write(f"{name!r}: {ref}") + self.write("})") + if not frame.block_frame and not frame.loop_frame and public_names: + if len(public_names) == 1: + self.writeline(f"context.exported_vars.add({public_names[0]!r})") + else: + names_str = ", ".join(map(repr, sorted(public_names))) + self.writeline(f"context.exported_vars.update(({names_str}))") + + # -- Statement Visitors + + def visit_Template( + self, node: nodes.Template, frame: t.Optional[Frame] = None + ) -> None: + assert frame is None, "no root frame allowed" + eval_ctx = EvalContext(self.environment, self.name) + + from .runtime import async_exported + from .runtime import exported + + if self.environment.is_async: + exported_names = sorted(exported + async_exported) + else: + exported_names = sorted(exported) + + self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) + + # if we want a deferred initialization we cannot move the + # environment into a local name + envenv = "" if self.defer_init else ", environment=environment" + + # do we have an extends tag at all? If not, we can save some + # overhead by just not processing any inheritance code. 
+ have_extends = node.find(nodes.Extends) is not None + + # find all blocks + for block in node.find_all(nodes.Block): + if block.name in self.blocks: + self.fail(f"block {block.name!r} defined twice", block.lineno) + self.blocks[block.name] = block + + # find all imports and import them + for import_ in node.find_all(nodes.ImportedName): + if import_.importname not in self.import_aliases: + imp = import_.importname + self.import_aliases[imp] = alias = self.temporary_identifier() + if "." in imp: + module, obj = imp.rsplit(".", 1) + self.writeline(f"from {module} import {obj} as {alias}") + else: + self.writeline(f"import {imp} as {alias}") + + # add the load name + self.writeline(f"name = {self.name!r}") + + # generate the root render function. + self.writeline( + f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 + ) + self.indent() + self.write_commons() + + # process the root + frame = Frame(eval_ctx) + if "self" in find_undeclared(node.body, ("self",)): + ref = frame.symbols.declare_parameter("self") + self.writeline(f"{ref} = TemplateReference(context)") + frame.symbols.analyze_node(node) + frame.toplevel = frame.rootlevel = True + frame.require_output_check = have_extends and not self.has_known_extends + if have_extends: + self.writeline("parent_template = None") + self.enter_frame(frame) + self.pull_dependencies(node.body) + self.blockvisit(node.body, frame) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + # make sure that the parent root is called. + if have_extends: + if not self.has_known_extends: + self.indent() + self.writeline("if parent_template is not None:") + self.indent() + if not self.environment.is_async: + self.writeline("yield from parent_template.root_render_func(context)") + else: + self.writeline("agen = parent_template.root_render_func(context)") + self.writeline("try:") + self.indent() + self.writeline("async for event in agen:") + self.indent() + self.writeline("yield event") + self.outdent() + self.outdent() + self.writeline("finally: await agen.aclose()") + self.outdent(1 + (not self.has_known_extends)) + + # at this point we now have the blocks collected and can visit them too. + for name, block in self.blocks.items(): + self.writeline( + f"{self.func('block_' + name)}(context, missing=missing{envenv}):", + block, + 1, + ) + self.indent() + self.write_commons() + # It's important that we do not make this frame a child of the + # toplevel template. This would cause a variety of + # interesting issues with identifier tracking. 
+ block_frame = Frame(eval_ctx) + block_frame.block_frame = True + undeclared = find_undeclared(block.body, ("self", "super")) + if "self" in undeclared: + ref = block_frame.symbols.declare_parameter("self") + self.writeline(f"{ref} = TemplateReference(context)") + if "super" in undeclared: + ref = block_frame.symbols.declare_parameter("super") + self.writeline(f"{ref} = context.super({name!r}, block_{name})") + block_frame.symbols.analyze_node(block) + block_frame.block = name + self.writeline("_block_vars = {}") + self.enter_frame(block_frame) + self.pull_dependencies(block.body) + self.blockvisit(block.body, block_frame) + self.leave_frame(block_frame, with_python_scope=True) + self.outdent() + + blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) + self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) + debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) + self.writeline(f"debug_info = {debug_kv_str!r}") + + def visit_Block(self, node: nodes.Block, frame: Frame) -> None: + """Call a block and register it for the template.""" + level = 0 + if frame.toplevel: + # if we know that we are a child template, there is no need to + # check if we are one + if self.has_known_extends: + return + if self.extends_so_far > 0: + self.writeline("if parent_template is None:") + self.indent() + level += 1 + + if node.scoped: + context = self.derive_context(frame) + else: + context = self.get_context_ref() + + if node.required: + self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) + self.indent() + self.writeline( + f'raise TemplateRuntimeError("Required block {node.name!r} not found")', + node, + ) + self.outdent() + + if not self.environment.is_async and frame.buffer is None: + self.writeline( + f"yield from context.blocks[{node.name!r}][0]({context})", node + ) + else: + self.writeline(f"gen = context.blocks[{node.name!r}][0]({context})") + self.writeline("try:") + self.indent() + self.writeline( + f"{self.choose_async()}for event in gen:", + node, + ) + self.indent() + self.simple_write("event", frame) + self.outdent() + self.outdent() + self.writeline( + f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" + ) + + self.outdent(level) + + def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: + """Calls the extender.""" + if not frame.toplevel: + self.fail("cannot use extend from a non top-level scope", node.lineno) + + # if the number of extends statements in general is zero so + # far, we don't have to add a check if something extended + # the template before this one. + if self.extends_so_far > 0: + # if we have a known extends we just add a template runtime + # error into the generated code. We could catch that at compile + # time too, but i welcome it not to confuse users by throwing the + # same error at different times just "because we can". + if not self.has_known_extends: + self.writeline("if parent_template is not None:") + self.indent() + self.writeline('raise TemplateRuntimeError("extended multiple times")') + + # if we have a known extends already we don't need that code here + # as we know that the template execution will end here. 
+ if self.has_known_extends: + raise CompilerExit() + else: + self.outdent() + + self.writeline("parent_template = environment.get_template(", node) + self.visit(node.template, frame) + self.write(f", {self.name!r})") + self.writeline("for name, parent_block in parent_template.blocks.items():") + self.indent() + self.writeline("context.blocks.setdefault(name, []).append(parent_block)") + self.outdent() + + # if this extends statement was in the root level we can take + # advantage of that information and simplify the generated code + # in the top level from this point onwards + if frame.rootlevel: + self.has_known_extends = True + + # and now we have one more + self.extends_so_far += 1 + + def visit_Include(self, node: nodes.Include, frame: Frame) -> None: + """Handles includes.""" + if node.ignore_missing: + self.writeline("try:") + self.indent() + + func_name = "get_or_select_template" + if isinstance(node.template, nodes.Const): + if isinstance(node.template.value, str): + func_name = "get_template" + elif isinstance(node.template.value, (tuple, list)): + func_name = "select_template" + elif isinstance(node.template, (nodes.Tuple, nodes.List)): + func_name = "select_template" + + self.writeline(f"template = environment.{func_name}(", node) + self.visit(node.template, frame) + self.write(f", {self.name!r})") + if node.ignore_missing: + self.outdent() + self.writeline("except TemplateNotFound:") + self.indent() + self.writeline("pass") + self.outdent() + self.writeline("else:") + self.indent() + + def loop_body() -> None: + self.indent() + self.simple_write("event", frame) + self.outdent() + + if node.with_context: + self.writeline( + f"gen = template.root_render_func(" + "template.new_context(context.get_all(), True," + f" {self.dump_local_context(frame)}))" + ) + self.writeline("try:") + self.indent() + self.writeline(f"{self.choose_async()}for event in gen:") + loop_body() + self.outdent() + self.writeline( + f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" + ) + elif self.environment.is_async: + self.writeline( + "for event in (await template._get_default_module_async())" + "._body_stream:" + ) + loop_body() + else: + self.writeline("yield from template._get_default_module()._body_stream") + + if node.ignore_missing: + self.outdent() + + def _import_common( + self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame + ) -> None: + self.write(f"{self.choose_async('await ')}environment.get_template(") + self.visit(node.template, frame) + self.write(f", {self.name!r}).") + + if node.with_context: + f_name = f"make_module{self.choose_async('_async')}" + self.write( + f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" + ) + else: + self.write(f"_get_default_module{self.choose_async('_async')}(context)") + + def visit_Import(self, node: nodes.Import, frame: Frame) -> None: + """Visit regular imports.""" + self.writeline(f"{frame.symbols.ref(node.target)} = ", node) + if frame.toplevel: + self.write(f"context.vars[{node.target!r}] = ") + + self._import_common(node, frame) + + if frame.toplevel and not node.target.startswith("_"): + self.writeline(f"context.exported_vars.discard({node.target!r})") + + def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: + """Visit named imports.""" + self.newline(node) + self.write("included_template = ") + self._import_common(node, frame) + var_names = [] + discarded_names = [] + for name in node.names: + if isinstance(name, tuple): + name, alias = name + else: + alias = name + 
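+            # at this point ``name`` is the exported name in the imported
+            # template and ``alias`` is the local name it is bound to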
self.writeline( + f"{frame.symbols.ref(alias)} =" + f" getattr(included_template, {name!r}, missing)" + ) + self.writeline(f"if {frame.symbols.ref(alias)} is missing:") + self.indent() + # The position will contain the template name, and will be formatted + # into a string that will be compiled into an f-string. Curly braces + # in the name must be replaced with escapes so that they will not be + # executed as part of the f-string. + position = self.position(node).replace("{", "{{").replace("}", "}}") + message = ( + "the template {included_template.__name__!r}" + f" (imported on {position})" + f" does not export the requested name {name!r}" + ) + self.writeline( + f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" + ) + self.outdent() + if frame.toplevel: + var_names.append(alias) + if not alias.startswith("_"): + discarded_names.append(alias) + + if var_names: + if len(var_names) == 1: + name = var_names[0] + self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") + else: + names_kv = ", ".join( + f"{name!r}: {frame.symbols.ref(name)}" for name in var_names + ) + self.writeline(f"context.vars.update({{{names_kv}}})") + if discarded_names: + if len(discarded_names) == 1: + self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") + else: + names_str = ", ".join(map(repr, discarded_names)) + self.writeline( + f"context.exported_vars.difference_update(({names_str}))" + ) + + def visit_For(self, node: nodes.For, frame: Frame) -> None: + loop_frame = frame.inner() + loop_frame.loop_frame = True + test_frame = frame.inner() + else_frame = frame.inner() + + # try to figure out if we have an extended loop. An extended loop + # is necessary if the loop is in recursive mode if the special loop + # variable is accessed in the body if the body is a scoped block. + extended_loop = ( + node.recursive + or "loop" + in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) + or any(block.scoped for block in node.find_all(nodes.Block)) + ) + + loop_ref = None + if extended_loop: + loop_ref = loop_frame.symbols.declare_parameter("loop") + + loop_frame.symbols.analyze_node(node, for_branch="body") + if node.else_: + else_frame.symbols.analyze_node(node, for_branch="else") + + if node.test: + loop_filter_func = self.temporary_identifier() + test_frame.symbols.analyze_node(node, for_branch="test") + self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) + self.indent() + self.enter_frame(test_frame) + self.writeline(self.choose_async("async for ", "for ")) + self.visit(node.target, loop_frame) + self.write(" in ") + self.write(self.choose_async("auto_aiter(fiter)", "fiter")) + self.write(":") + self.indent() + self.writeline("if ", node.test) + self.visit(node.test, test_frame) + self.write(":") + self.indent() + self.writeline("yield ") + self.visit(node.target, loop_frame) + self.outdent(3) + self.leave_frame(test_frame, with_python_scope=True) + + # if we don't have an recursive loop we have to find the shadowed + # variables at that point. Because loops can be nested but the loop + # variable is a special one we have to enforce aliasing for it. 
+ if node.recursive: + self.writeline( + f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node + ) + self.indent() + self.buffer(loop_frame) + + # Use the same buffer for the else frame + else_frame.buffer = loop_frame.buffer + + # make sure the loop variable is a special one and raise a template + # assertion error if a loop tries to write to loop + if extended_loop: + self.writeline(f"{loop_ref} = missing") + + for name in node.find_all(nodes.Name): + if name.ctx == "store" and name.name == "loop": + self.fail( + "Can't assign to special loop variable in for-loop target", + name.lineno, + ) + + if node.else_: + iteration_indicator = self.temporary_identifier() + self.writeline(f"{iteration_indicator} = 1") + + self.writeline(self.choose_async("async for ", "for "), node) + self.visit(node.target, loop_frame) + if extended_loop: + self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") + else: + self.write(" in ") + + if node.test: + self.write(f"{loop_filter_func}(") + if node.recursive: + self.write("reciter") + else: + if self.environment.is_async and not extended_loop: + self.write("auto_aiter(") + self.visit(node.iter, frame) + if self.environment.is_async and not extended_loop: + self.write(")") + if node.test: + self.write(")") + + if node.recursive: + self.write(", undefined, loop_render_func, depth):") + else: + self.write(", undefined):" if extended_loop else ":") + + self.indent() + self.enter_frame(loop_frame) + + self.writeline("_loop_vars = {}") + self.blockvisit(node.body, loop_frame) + if node.else_: + self.writeline(f"{iteration_indicator} = 0") + self.outdent() + self.leave_frame( + loop_frame, with_python_scope=node.recursive and not node.else_ + ) + + if node.else_: + self.writeline(f"if {iteration_indicator}:") + self.indent() + self.enter_frame(else_frame) + self.blockvisit(node.else_, else_frame) + self.leave_frame(else_frame) + self.outdent() + + # if the node was recursive we have to return the buffer contents + # and start the iteration code + if node.recursive: + self.return_buffer_contents(loop_frame) + self.outdent() + self.start_write(frame, node) + self.write(f"{self.choose_async('await ')}loop(") + if self.environment.is_async: + self.write("auto_aiter(") + self.visit(node.iter, frame) + if self.environment.is_async: + self.write(")") + self.write(", loop)") + self.end_write(frame) + + # at the end of the iteration, clear any assignments made in the + # loop from the top level + if self._assign_stack: + self._assign_stack[-1].difference_update(loop_frame.symbols.stores) + + def visit_If(self, node: nodes.If, frame: Frame) -> None: + if_frame = frame.soft() + self.writeline("if ", node) + self.visit(node.test, if_frame) + self.write(":") + self.indent() + self.blockvisit(node.body, if_frame) + self.outdent() + for elif_ in node.elif_: + self.writeline("elif ", elif_) + self.visit(elif_.test, if_frame) + self.write(":") + self.indent() + self.blockvisit(elif_.body, if_frame) + self.outdent() + if node.else_: + self.writeline("else:") + self.indent() + self.blockvisit(node.else_, if_frame) + self.outdent() + + def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: + macro_frame, macro_ref = self.macro_body(node, frame) + self.newline() + if frame.toplevel: + if not node.name.startswith("_"): + self.write(f"context.exported_vars.add({node.name!r})") + self.writeline(f"context.vars[{node.name!r}] = ") + self.write(f"{frame.symbols.ref(node.name)} = ") + self.macro_def(macro_ref, macro_frame) + + def visit_CallBlock(self, 
node: nodes.CallBlock, frame: Frame) -> None: + call_frame, macro_ref = self.macro_body(node, frame) + self.writeline("caller = ") + self.macro_def(macro_ref, call_frame) + self.start_write(frame, node) + self.visit_Call(node.call, frame, forward_caller=True) + self.end_write(frame) + + def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: + filter_frame = frame.inner() + filter_frame.symbols.analyze_node(node) + self.enter_frame(filter_frame) + self.buffer(filter_frame) + self.blockvisit(node.body, filter_frame) + self.start_write(frame, node) + self.visit_Filter(node.filter, filter_frame) + self.end_write(frame) + self.leave_frame(filter_frame) + + def visit_With(self, node: nodes.With, frame: Frame) -> None: + with_frame = frame.inner() + with_frame.symbols.analyze_node(node) + self.enter_frame(with_frame) + for target, expr in zip(node.targets, node.values): + self.newline() + self.visit(target, with_frame) + self.write(" = ") + self.visit(expr, frame) + self.blockvisit(node.body, with_frame) + self.leave_frame(with_frame) + + def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: + self.newline(node) + self.visit(node.node, frame) + + class _FinalizeInfo(t.NamedTuple): + const: t.Optional[t.Callable[..., str]] + src: t.Optional[str] + + @staticmethod + def _default_finalize(value: t.Any) -> t.Any: + """The default finalize function if the environment isn't + configured with one. Or, if the environment has one, this is + called on that function's output for constants. + """ + return str(value) + + _finalize: t.Optional[_FinalizeInfo] = None + + def _make_finalize(self) -> _FinalizeInfo: + """Build the finalize function to be used on constants and at + runtime. Cached so it's only created once for all output nodes. + + Returns a ``namedtuple`` with the following attributes: + + ``const`` + A function to finalize constant data at compile time. + + ``src`` + Source code to output around nodes to be evaluated at + runtime. + """ + if self._finalize is not None: + return self._finalize + + finalize: t.Optional[t.Callable[..., t.Any]] + finalize = default = self._default_finalize + src = None + + if self.environment.finalize: + src = "environment.finalize(" + env_finalize = self.environment.finalize + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(env_finalize) # type: ignore + ) + finalize = None + + if pass_arg is None: + + def finalize(value: t.Any) -> t.Any: # noqa: F811 + return default(env_finalize(value)) + + else: + src = f"{src}{pass_arg}, " + + if pass_arg == "environment": + + def finalize(value: t.Any) -> t.Any: # noqa: F811 + return default(env_finalize(self.environment, value)) + + self._finalize = self._FinalizeInfo(finalize, src) + return self._finalize + + def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: + """Given a group of constant values converted from ``Output`` + child nodes, produce a string to write to the template module + source. + """ + return repr(concat(group)) + + def _output_child_to_const( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> str: + """Try to optimize a child of an ``Output`` node by trying to + convert it to constant, finalized data at compile time. + + If :exc:`Impossible` is raised, the node is not constant and + will be evaluated at runtime. Any other exception will also be + evaluated at runtime for easier debugging. 
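+
+        For example, with the default environment configuration
+        ``{{ 1 + 1 }}`` can be folded into the constant string ``"2"`` at
+        compile time.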
+ """ + const = node.as_const(frame.eval_ctx) + + if frame.eval_ctx.autoescape: + const = escape(const) + + # Template data doesn't go through finalize. + if isinstance(node, nodes.TemplateData): + return str(const) + + return finalize.const(const) # type: ignore + + def _output_child_pre( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: + """Output extra source code before visiting a child of an + ``Output`` node. + """ + if frame.eval_ctx.volatile: + self.write("(escape if context.eval_ctx.autoescape else str)(") + elif frame.eval_ctx.autoescape: + self.write("escape(") + else: + self.write("str(") + + if finalize.src is not None: + self.write(finalize.src) + + def _output_child_post( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: + """Output extra source code after visiting a child of an + ``Output`` node. + """ + self.write(")") + + if finalize.src is not None: + self.write(")") + + def visit_Output(self, node: nodes.Output, frame: Frame) -> None: + # If an extends is active, don't render outside a block. + if frame.require_output_check: + # A top-level extends is known to exist at compile time. + if self.has_known_extends: + return + + self.writeline("if parent_template is None:") + self.indent() + + finalize = self._make_finalize() + body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] + + # Evaluate constants at compile time if possible. Each item in + # body will be either a list of static data or a node to be + # evaluated at runtime. + for child in node.nodes: + try: + if not ( + # If the finalize function requires runtime context, + # constants can't be evaluated at compile time. + finalize.const + # Unless it's basic template data that won't be + # finalized anyway. + or isinstance(child, nodes.TemplateData) + ): + raise nodes.Impossible() + + const = self._output_child_to_const(child, frame, finalize) + except (nodes.Impossible, Exception): + # The node was not constant and needs to be evaluated at + # runtime. Or another error was raised, which is easier + # to debug at runtime. + body.append(child) + continue + + if body and isinstance(body[-1], list): + body[-1].append(const) + else: + body.append([const]) + + if frame.buffer is not None: + if len(body) == 1: + self.writeline(f"{frame.buffer}.append(") + else: + self.writeline(f"{frame.buffer}.extend((") + + self.indent() + + for item in body: + if isinstance(item, list): + # A group of constant data to join and output. + val = self._output_const_repr(item) + + if frame.buffer is None: + self.writeline("yield " + val) + else: + self.writeline(val + ",") + else: + if frame.buffer is None: + self.writeline("yield ", item) + else: + self.newline(item) + + # A node to be evaluated at runtime. + self._output_child_pre(item, frame, finalize) + self.visit(item, frame) + self._output_child_post(item, frame, finalize) + + if frame.buffer is not None: + self.write(",") + + if frame.buffer is not None: + self.outdent() + self.writeline(")" if len(body) == 1 else "))") + + if frame.require_output_check: + self.outdent() + + def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: + self.push_assign_tracking() + + # ``a.b`` is allowed for assignment, and is parsed as an NSRef. However, + # it is only valid if it references a Namespace object. Emit a check for + # that for each ref here, before assignment code is emitted. This can't + # be done in visit_NSRef as the ref could be in the middle of a tuple. 
+ seen_refs: t.Set[str] = set() + + for nsref in node.find_all(nodes.NSRef): + if nsref.name in seen_refs: + # Only emit the check for each reference once, in case the same + # ref is used multiple times in a tuple, `ns.a, ns.b = c, d`. + continue + + seen_refs.add(nsref.name) + ref = frame.symbols.ref(nsref.name) + self.writeline(f"if not isinstance({ref}, Namespace):") + self.indent() + self.writeline( + "raise TemplateRuntimeError" + '("cannot assign attribute on non-namespace object")' + ) + self.outdent() + + self.newline(node) + self.visit(node.target, frame) + self.write(" = ") + self.visit(node.node, frame) + self.pop_assign_tracking(frame) + + def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: + self.push_assign_tracking() + block_frame = frame.inner() + # This is a special case. Since a set block always captures we + # will disable output checks. This way one can use set blocks + # toplevel even in extended templates. + block_frame.require_output_check = False + block_frame.symbols.analyze_node(node) + self.enter_frame(block_frame) + self.buffer(block_frame) + self.blockvisit(node.body, block_frame) + self.newline(node) + self.visit(node.target, frame) + self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") + if node.filter is not None: + self.visit_Filter(node.filter, block_frame) + else: + self.write(f"concat({block_frame.buffer})") + self.write(")") + self.pop_assign_tracking(frame) + self.leave_frame(block_frame) + + # -- Expression Visitors + + def visit_Name(self, node: nodes.Name, frame: Frame) -> None: + if node.ctx == "store" and ( + frame.toplevel or frame.loop_frame or frame.block_frame + ): + if self._assign_stack: + self._assign_stack[-1].add(node.name) + ref = frame.symbols.ref(node.name) + + # If we are looking up a variable we might have to deal with the + # case where it's undefined. We can skip that case if the load + # instruction indicates a parameter which are always defined. + if node.ctx == "load": + load = frame.symbols.find_load(ref) + if not ( + load is not None + and load[0] == VAR_LOAD_PARAMETER + and not self.parameter_is_undeclared(ref) + ): + self.write( + f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" + ) + return + + self.write(ref) + + def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: + # NSRef is a dotted assignment target a.b=c, but uses a[b]=c internally. + # visit_Assign emits code to validate that each ref is to a Namespace + # object only. That can't be emitted here as the ref could be in the + # middle of a tuple assignment. 
+ ref = frame.symbols.ref(node.name) + self.writeline(f"{ref}[{node.attr!r}]") + + def visit_Const(self, node: nodes.Const, frame: Frame) -> None: + val = node.as_const(frame.eval_ctx) + if isinstance(val, float): + self.write(str(val)) + else: + self.write(repr(val)) + + def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: + try: + self.write(repr(node.as_const(frame.eval_ctx))) + except nodes.Impossible: + self.write( + f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" + ) + + def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: + self.write("(") + idx = -1 + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item, frame) + self.write(",)" if idx == 0 else ")") + + def visit_List(self, node: nodes.List, frame: Frame) -> None: + self.write("[") + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item, frame) + self.write("]") + + def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: + self.write("{") + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item.key, frame) + self.write(": ") + self.visit(item.value, frame) + self.write("}") + + visit_Add = _make_binop("+") + visit_Sub = _make_binop("-") + visit_Mul = _make_binop("*") + visit_Div = _make_binop("/") + visit_FloorDiv = _make_binop("//") + visit_Pow = _make_binop("**") + visit_Mod = _make_binop("%") + visit_And = _make_binop("and") + visit_Or = _make_binop("or") + visit_Pos = _make_unop("+") + visit_Neg = _make_unop("-") + visit_Not = _make_unop("not ") + + @optimizeconst + def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None: + if frame.eval_ctx.volatile: + func_name = "(markup_join if context.eval_ctx.volatile else str_join)" + elif frame.eval_ctx.autoescape: + func_name = "markup_join" + else: + func_name = "str_join" + self.write(f"{func_name}((") + for arg in node.nodes: + self.visit(arg, frame) + self.write(", ") + self.write("))") + + @optimizeconst + def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: + self.write("(") + self.visit(node.expr, frame) + for op in node.ops: + self.visit(op, frame) + self.write(")") + + def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: + self.write(f" {operators[node.op]} ") + self.visit(node.expr, frame) + + @optimizeconst + def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getattr(") + self.visit(node.node, frame) + self.write(f", {node.attr!r})") + + if self.environment.is_async: + self.write("))") + + @optimizeconst + def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: + # slices bypass the environment getitem method. 
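+        # e.g. ``foo[1:2]`` compiles to a plain Python subscript, while
+        # ``foo[bar]`` goes through environment.getitem so undefined handling
+        # and sandbox checks still apply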
+ if isinstance(node.arg, nodes.Slice): + self.visit(node.node, frame) + self.write("[") + self.visit(node.arg, frame) + self.write("]") + else: + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getitem(") + self.visit(node.node, frame) + self.write(", ") + self.visit(node.arg, frame) + self.write(")") + + if self.environment.is_async: + self.write("))") + + def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: + if node.start is not None: + self.visit(node.start, frame) + self.write(":") + if node.stop is not None: + self.visit(node.stop, frame) + if node.step is not None: + self.write(":") + self.visit(node.step, frame) + + @contextmanager + def _filter_test_common( + self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool + ) -> t.Iterator[None]: + if self.environment.is_async: + self.write("(await auto_await(") + + if is_filter: + self.write(f"{self.filters[node.name]}(") + func = self.environment.filters.get(node.name) + else: + self.write(f"{self.tests[node.name]}(") + func = self.environment.tests.get(node.name) + + # When inside an If or CondExpr frame, allow the filter to be + # undefined at compile time and only raise an error if it's + # actually called at runtime. See pull_dependencies. + if func is None and not frame.soft_frame: + type_name = "filter" if is_filter else "test" + self.fail(f"No {type_name} named {node.name!r}.", node.lineno) + + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(func) # type: ignore + ) + + if pass_arg is not None: + self.write(f"{pass_arg}, ") + + # Back to the visitor function to handle visiting the target of + # the filter or test. 
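As a sketch of what the `pass_arg` lookup above wires in at the call site (assumes a standard Jinja2 3.x install; the filter and test names are invented for illustration):

```python
from jinja2 import Environment, pass_eval_context
from markupsafe import Markup, escape

env = Environment(autoescape=True)

@pass_eval_context
def nl2br(eval_ctx, value):
    # Because of @pass_eval_context the compiler prepends context.eval_ctx as
    # the first argument, matching the pass_arg branch above.
    result = escape(value).replace("\n", Markup("<br>\n"))
    return Markup(result) if eval_ctx.autoescape else result

env.filters["nl2br"] = nl2br
env.tests["shouting"] = lambda s: s.isupper()

tmpl = env.from_string("{{ text | nl2br }} / {{ text is shouting }}")
print(tmpl.render(text="HELLO\nWORLD"))  # -> HELLO<br>\nWORLD / True
```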
+ yield + + self.signature(node, frame) + self.write(")") + + if self.environment.is_async: + self.write("))") + + @optimizeconst + def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: + with self._filter_test_common(node, frame, True): + # if the filter node is None we are inside a filter block + # and want to write to the current buffer + if node.node is not None: + self.visit(node.node, frame) + elif frame.eval_ctx.volatile: + self.write( + f"(Markup(concat({frame.buffer}))" + f" if context.eval_ctx.autoescape else concat({frame.buffer}))" + ) + elif frame.eval_ctx.autoescape: + self.write(f"Markup(concat({frame.buffer}))") + else: + self.write(f"concat({frame.buffer})") + + @optimizeconst + def visit_Test(self, node: nodes.Test, frame: Frame) -> None: + with self._filter_test_common(node, frame, False): + self.visit(node.node, frame) + + @optimizeconst + def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: + frame = frame.soft() + + def write_expr2() -> None: + if node.expr2 is not None: + self.visit(node.expr2, frame) + return + + self.write( + f'cond_expr_undefined("the inline if-expression on' + f" {self.position(node)} evaluated to false and no else" + f' section was defined.")' + ) + + self.write("(") + self.visit(node.expr1, frame) + self.write(" if ") + self.visit(node.test, frame) + self.write(" else ") + write_expr2() + self.write(")") + + @optimizeconst + def visit_Call( + self, node: nodes.Call, frame: Frame, forward_caller: bool = False + ) -> None: + if self.environment.is_async: + self.write("(await auto_await(") + if self.environment.sandboxed: + self.write("environment.call(context, ") + else: + self.write("context.call(") + self.visit(node.node, frame) + extra_kwargs = {"caller": "caller"} if forward_caller else None + loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} + block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} + if extra_kwargs: + extra_kwargs.update(loop_kwargs, **block_kwargs) + elif loop_kwargs or block_kwargs: + extra_kwargs = dict(loop_kwargs, **block_kwargs) + self.signature(node, frame, extra_kwargs) + self.write(")") + if self.environment.is_async: + self.write("))") + + def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: + self.write(node.key + "=") + self.visit(node.value, frame) + + # -- Unused nodes for extensions + + def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: + self.write("Markup(") + self.visit(node.expr, frame) + self.write(")") + + def visit_MarkSafeIfAutoescape( + self, node: nodes.MarkSafeIfAutoescape, frame: Frame + ) -> None: + self.write("(Markup if context.eval_ctx.autoescape else identity)(") + self.visit(node.expr, frame) + self.write(")") + + def visit_EnvironmentAttribute( + self, node: nodes.EnvironmentAttribute, frame: Frame + ) -> None: + self.write("environment." 
+ node.name) + + def visit_ExtensionAttribute( + self, node: nodes.ExtensionAttribute, frame: Frame + ) -> None: + self.write(f"environment.extensions[{node.identifier!r}].{node.name}") + + def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: + self.write(self.import_aliases[node.importname]) + + def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: + self.write(node.name) + + def visit_ContextReference( + self, node: nodes.ContextReference, frame: Frame + ) -> None: + self.write("context") + + def visit_DerivedContextReference( + self, node: nodes.DerivedContextReference, frame: Frame + ) -> None: + self.write(self.derive_context(frame)) + + def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: + self.writeline("continue", node) + + def visit_Break(self, node: nodes.Break, frame: Frame) -> None: + self.writeline("break", node) + + def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: + scope_frame = frame.inner() + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + + def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: + ctx = self.temporary_identifier() + self.writeline(f"{ctx} = {self.derive_context(frame)}") + self.writeline(f"{ctx}.vars = ") + self.visit(node.context, frame) + self.push_context_reference(ctx) + + scope_frame = frame.inner(isolated=True) + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + self.pop_context_reference() + + def visit_EvalContextModifier( + self, node: nodes.EvalContextModifier, frame: Frame + ) -> None: + for keyword in node.options: + self.writeline(f"context.eval_ctx.{keyword.key} = ") + self.visit(keyword.value, frame) + try: + val = keyword.value.as_const(frame.eval_ctx) + except nodes.Impossible: + frame.eval_ctx.volatile = True + else: + setattr(frame.eval_ctx, keyword.key, val) + + def visit_ScopedEvalContextModifier( + self, node: nodes.ScopedEvalContextModifier, frame: Frame + ) -> None: + old_ctx_name = self.temporary_identifier() + saved_ctx = frame.eval_ctx.save() + self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") + self.visit_EvalContextModifier(node, frame) + for child in node.body: + self.visit(child, frame) + frame.eval_ctx.revert(saved_ctx) + self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/.venv/Lib/site-packages/jinja2/constants.py b/.venv/Lib/site-packages/jinja2/constants.py new file mode 100644 index 00000000..41a1c23b --- /dev/null +++ b/.venv/Lib/site-packages/jinja2/constants.py @@ -0,0 +1,20 @@ +#: list of lorem ipsum words used by the lipsum() helper function +LOREM_IPSUM_WORDS = """\ +a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at +auctor augue bibendum blandit class commodo condimentum congue consectetuer +consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus +diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend +elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames +faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac +hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum +justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem +luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie +mollis montes morbi mus nam 
nascetur natoque nec neque netus nibh nisi nisl non +nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque +penatibus per pharetra phasellus placerat platea porta porttitor posuere +potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus +ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit +sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor +tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices +ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus +viverra volutpat vulputate""" diff --git a/.venv/Lib/site-packages/jinja2/debug.py b/.venv/Lib/site-packages/jinja2/debug.py new file mode 100644 index 00000000..eeeeee78 --- /dev/null +++ b/.venv/Lib/site-packages/jinja2/debug.py @@ -0,0 +1,191 @@ +import sys +import typing as t +from types import CodeType +from types import TracebackType + +from .exceptions import TemplateSyntaxError +from .utils import internal_code +from .utils import missing + +if t.TYPE_CHECKING: + from .runtime import Context + + +def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: + """Rewrite the current exception to replace any tracebacks from + within compiled template code with tracebacks that look like they + came from the template source. + + This must be called within an ``except`` block. + + :param source: For ``TemplateSyntaxError``, the original source if + known. + :return: The original exception with the rewritten traceback. + """ + _, exc_value, tb = sys.exc_info() + exc_value = t.cast(BaseException, exc_value) + tb = t.cast(TracebackType, tb) + + if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: + exc_value.translated = True + exc_value.source = source + # Remove the old traceback, otherwise the frames from the + # compiler still show up. + exc_value.with_traceback(None) + # Outside of runtime, so the frame isn't executing template + # code, but it still needs to point at the template. + tb = fake_traceback( + exc_value, None, exc_value.filename or "", exc_value.lineno + ) + else: + # Skip the frame for the render function. + tb = tb.tb_next + + stack = [] + + # Build the stack of traceback object, replacing any in template + # code with the source file and line information. + while tb is not None: + # Skip frames decorated with @internalcode. These are internal + # calls that aren't useful in template debugging output. + if tb.tb_frame.f_code in internal_code: + tb = tb.tb_next + continue + + template = tb.tb_frame.f_globals.get("__jinja_template__") + + if template is not None: + lineno = template.get_corresponding_lineno(tb.tb_lineno) + fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) + stack.append(fake_tb) + else: + stack.append(tb) + + tb = tb.tb_next + + tb_next = None + + # Assign tb_next in reverse to avoid circular references. + for tb in reversed(stack): + tb.tb_next = tb_next + tb_next = tb + + return exc_value.with_traceback(tb_next) + + +def fake_traceback( # type: ignore + exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int +) -> TracebackType: + """Produce a new traceback object that looks like it came from the + template source instead of the compiled code. The filename, line + number, and location name will point to the template, and the local + variables will be the current template context. + + :param exc_value: The original exception to be re-raised to create + the new traceback. 
+ :param tb: The original traceback to get the local variables and + code info from. + :param filename: The template filename. + :param lineno: The line number in the template source. + """ + if tb is not None: + # Replace the real locals with the context that would be + # available at that point in the template. + locals = get_template_locals(tb.tb_frame.f_locals) + locals.pop("__jinja_exception__", None) + else: + locals = {} + + globals = { + "__name__": filename, + "__file__": filename, + "__jinja_exception__": exc_value, + } + # Raise an exception at the correct line number. + code: CodeType = compile( + "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" + ) + + # Build a new code object that points to the template file and + # replaces the location with a block name. + location = "template" + + if tb is not None: + function = tb.tb_frame.f_code.co_name + + if function == "root": + location = "top-level template code" + elif function.startswith("block_"): + location = f"block {function[6:]!r}" + + if sys.version_info >= (3, 8): + code = code.replace(co_name=location) + else: + code = CodeType( + code.co_argcount, + code.co_kwonlyargcount, + code.co_nlocals, + code.co_stacksize, + code.co_flags, + code.co_code, + code.co_consts, + code.co_names, + code.co_varnames, + code.co_filename, + location, + code.co_firstlineno, + code.co_lnotab, + code.co_freevars, + code.co_cellvars, + ) + + # Execute the new code, which is guaranteed to raise, and return + # the new traceback without this frame. + try: + exec(code, globals, locals) + except BaseException: + return sys.exc_info()[2].tb_next # type: ignore + + +def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: + """Based on the runtime locals, get the context that would be + available at that point in the template. + """ + # Start with the current template context. + ctx: t.Optional[Context] = real_locals.get("context") + + if ctx is not None: + data: t.Dict[str, t.Any] = ctx.get_all().copy() + else: + data = {} + + # Might be in a derived context that only sets local variables + # rather than pushing a context. Local variables follow the scheme + # l_depth_name. Find the highest-depth local that has a value for + # each name. + local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} + + for name, value in real_locals.items(): + if not name.startswith("l_") or value is missing: + # Not a template variable, or no longer relevant. + continue + + try: + _, depth_str, name = name.split("_", 2) + depth = int(depth_str) + except ValueError: + continue + + cur_depth = local_overrides.get(name, (-1,))[0] + + if cur_depth < depth: + local_overrides[name] = (depth, value) + + # Modify the context with any derived context. 
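As an illustration of what this module achieves at runtime (not part of the diff itself; standard Jinja2 assumed), an error raised inside a template is reported against the template source rather than the generated Python:

```python
import traceback

from jinja2 import Environment

env = Environment()
tmpl = env.from_string("line one\n{{ 1 // 0 }}")

try:
    tmpl.render()
except ZeroDivisionError:
    # The rewritten traceback points at File "<template>", line 2,
    # in top-level template code, instead of the compiled module.
    traceback.print_exc()
```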
+ for name, (_, value) in local_overrides.items(): + if value is missing: + data.pop(name, None) + else: + data[name] = value + + return data diff --git a/.venv/Lib/site-packages/jinja2/defaults.py b/.venv/Lib/site-packages/jinja2/defaults.py new file mode 100644 index 00000000..638cad3d --- /dev/null +++ b/.venv/Lib/site-packages/jinja2/defaults.py @@ -0,0 +1,48 @@ +import typing as t + +from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 +from .tests import TESTS as DEFAULT_TESTS # noqa: F401 +from .utils import Cycler +from .utils import generate_lorem_ipsum +from .utils import Joiner +from .utils import Namespace + +if t.TYPE_CHECKING: + import typing_extensions as te + +# defaults for the parser / lexer +BLOCK_START_STRING = "{%" +BLOCK_END_STRING = "%}" +VARIABLE_START_STRING = "{{" +VARIABLE_END_STRING = "}}" +COMMENT_START_STRING = "{#" +COMMENT_END_STRING = "#}" +LINE_STATEMENT_PREFIX: t.Optional[str] = None +LINE_COMMENT_PREFIX: t.Optional[str] = None +TRIM_BLOCKS = False +LSTRIP_BLOCKS = False +NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" +KEEP_TRAILING_NEWLINE = False + +# default filters, tests and namespace + +DEFAULT_NAMESPACE = { + "range": range, + "dict": dict, + "lipsum": generate_lorem_ipsum, + "cycler": Cycler, + "joiner": Joiner, + "namespace": Namespace, +} + +# default policies +DEFAULT_POLICIES: t.Dict[str, t.Any] = { + "compiler.ascii_str": True, + "urlize.rel": "noopener", + "urlize.target": None, + "urlize.extra_schemes": None, + "truncate.leeway": 5, + "json.dumps_function": None, + "json.dumps_kwargs": {"sort_keys": True}, + "ext.i18n.trimmed": False, +} diff --git a/.venv/Lib/site-packages/jinja2/environment.py b/.venv/Lib/site-packages/jinja2/environment.py new file mode 100644 index 00000000..0fc6e5be --- /dev/null +++ b/.venv/Lib/site-packages/jinja2/environment.py @@ -0,0 +1,1672 @@ +"""Classes for managing templates and their runtime and compile time +options. +""" + +import os +import typing +import typing as t +import weakref +from collections import ChainMap +from functools import lru_cache +from functools import partial +from functools import reduce +from types import CodeType + +from markupsafe import Markup + +from . 
import nodes +from .compiler import CodeGenerator +from .compiler import generate +from .defaults import BLOCK_END_STRING +from .defaults import BLOCK_START_STRING +from .defaults import COMMENT_END_STRING +from .defaults import COMMENT_START_STRING +from .defaults import DEFAULT_FILTERS # type: ignore[attr-defined] +from .defaults import DEFAULT_NAMESPACE +from .defaults import DEFAULT_POLICIES +from .defaults import DEFAULT_TESTS # type: ignore[attr-defined] +from .defaults import KEEP_TRAILING_NEWLINE +from .defaults import LINE_COMMENT_PREFIX +from .defaults import LINE_STATEMENT_PREFIX +from .defaults import LSTRIP_BLOCKS +from .defaults import NEWLINE_SEQUENCE +from .defaults import TRIM_BLOCKS +from .defaults import VARIABLE_END_STRING +from .defaults import VARIABLE_START_STRING +from .exceptions import TemplateNotFound +from .exceptions import TemplateRuntimeError +from .exceptions import TemplatesNotFound +from .exceptions import TemplateSyntaxError +from .exceptions import UndefinedError +from .lexer import get_lexer +from .lexer import Lexer +from .lexer import TokenStream +from .nodes import EvalContext +from .parser import Parser +from .runtime import Context +from .runtime import new_context +from .runtime import Undefined +from .utils import _PassArg +from .utils import concat +from .utils import consume +from .utils import import_string +from .utils import internalcode +from .utils import LRUCache +from .utils import missing + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .bccache import BytecodeCache + from .ext import Extension + from .loaders import BaseLoader + +_env_bound = t.TypeVar("_env_bound", bound="Environment") + + +# for direct template usage we have up to ten living environments +@lru_cache(maxsize=10) +def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: + """Return a new spontaneous environment. A spontaneous environment + is used for templates created directly rather than through an + existing environment. + + :param cls: Environment class to create. + :param args: Positional arguments passed to environment. + """ + env = cls(*args) + env.shared = True + return env + + +def create_cache( + size: int, +) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: + """Return the cache class for the given size.""" + if size == 0: + return None + + if size < 0: + return {} + + return LRUCache(size) # type: ignore + + +def copy_cache( + cache: t.Optional[t.MutableMapping[t.Any, t.Any]], +) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: + """Create an empty copy of the given cache.""" + if cache is None: + return None + + if type(cache) is dict: # noqa E721 + return {} + + return LRUCache(cache.capacity) # type: ignore + + +def load_extensions( + environment: "Environment", + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], +) -> t.Dict[str, "Extension"]: + """Load the extensions from the list and bind it to the environment. + Returns a dict of instantiated extensions. + """ + result = {} + + for extension in extensions: + if isinstance(extension, str): + extension = t.cast(t.Type["Extension"], import_string(extension)) + + result[extension.identifier] = extension(environment) + + return result + + +def _environment_config_check(environment: _env_bound) -> _env_bound: + """Perform a sanity check on the environment.""" + assert issubclass( + environment.undefined, Undefined + ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
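`get_spontaneous_environment` above is what backs direct `Template(...)` construction; a minimal sketch:

```python
from jinja2 import Template

# Templates created without an explicit Environment share one of up to ten
# cached "spontaneous" environments keyed by their configuration arguments.
t = Template("Hello {{ name }}!", trim_blocks=True)
print(t.render(name="World"))   # -> Hello World!
print(t.environment.shared)     # -> True
```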
+ assert ( + environment.block_start_string + != environment.variable_start_string + != environment.comment_start_string + ), "block, variable and comment start strings must be different." + assert environment.newline_sequence in { + "\r", + "\r\n", + "\n", + }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." + return environment + + +class Environment: + r"""The core component of Jinja is the `Environment`. It contains + important shared variables like configuration, filters, tests, + globals and others. Instances of this class may be modified if + they are not shared and if no template was loaded so far. + Modifications on environments after the first template was loaded + will lead to surprising effects and undefined behavior. + + Here are the possible initialization parameters: + + `block_start_string` + The string marking the beginning of a block. Defaults to ``'{%'``. + + `block_end_string` + The string marking the end of a block. Defaults to ``'%}'``. + + `variable_start_string` + The string marking the beginning of a print statement. + Defaults to ``'{{'``. + + `variable_end_string` + The string marking the end of a print statement. Defaults to + ``'}}'``. + + `comment_start_string` + The string marking the beginning of a comment. Defaults to ``'{#'``. + + `comment_end_string` + The string marking the end of a comment. Defaults to ``'#}'``. + + `line_statement_prefix` + If given and a string, this will be used as prefix for line based + statements. See also :ref:`line-statements`. + + `line_comment_prefix` + If given and a string, this will be used as prefix for line based + comments. See also :ref:`line-statements`. + + .. versionadded:: 2.2 + + `trim_blocks` + If this is set to ``True`` the first newline after a block is + removed (block, not variable tag!). Defaults to `False`. + + `lstrip_blocks` + If this is set to ``True`` leading spaces and tabs are stripped + from the start of a line to a block. Defaults to `False`. + + `newline_sequence` + The sequence that starts a newline. Must be one of ``'\r'``, + ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a + useful default for Linux and OS X systems as well as web + applications. + + `keep_trailing_newline` + Preserve the trailing newline when rendering templates. + The default is ``False``, which causes a single newline, + if present, to be stripped from the end of the template. + + .. versionadded:: 2.7 + + `extensions` + List of Jinja extensions to use. This can either be import paths + as strings or extension classes. For more information have a + look at :ref:`the extensions documentation `. + + `optimized` + should the optimizer be enabled? Default is ``True``. + + `undefined` + :class:`Undefined` or a subclass of it that is used to represent + undefined values in the template. + + `finalize` + A callable that can be used to process the result of a variable + expression before it is output. For example one can convert + ``None`` implicitly into an empty string here. + + `autoescape` + If set to ``True`` the XML/HTML autoescaping feature is enabled by + default. For more details about autoescaping see + :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also + be a callable that is passed the template name and has to + return ``True`` or ``False`` depending on autoescape should be + enabled by default. + + .. versionchanged:: 2.4 + `autoescape` can now be a function + + `loader` + The template loader for this environment. + + `cache_size` + The size of the cache. 
Per default this is ``400`` which means + that if more than 400 templates are loaded the loader will clean + out the least recently used template. If the cache size is set to + ``0`` templates are recompiled all the time, if the cache size is + ``-1`` the cache will not be cleaned. + + .. versionchanged:: 2.8 + The cache size was increased to 400 from a low 50. + + `auto_reload` + Some loaders load templates from locations where the template + sources may change (ie: file system or database). If + ``auto_reload`` is set to ``True`` (default) every time a template is + requested the loader checks if the source changed and if yes, it + will reload the template. For higher performance it's possible to + disable that. + + `bytecode_cache` + If set to a bytecode cache object, this object will provide a + cache for the internal Jinja bytecode so that templates don't + have to be parsed if they were not changed. + + See :ref:`bytecode-cache` for more information. + + `enable_async` + If set to true this enables async template execution which + allows using async functions and generators. + """ + + #: if this environment is sandboxed. Modifying this variable won't make + #: the environment sandboxed though. For a real sandboxed environment + #: have a look at jinja2.sandbox. This flag alone controls the code + #: generation by the compiler. + sandboxed = False + + #: True if the environment is just an overlay + overlayed = False + + #: the environment this environment is linked to if it is an overlay + linked_to: t.Optional["Environment"] = None + + #: shared environments have this set to `True`. A shared environment + #: must not be modified + shared = False + + #: the class that is used for code generation. See + #: :class:`~jinja2.compiler.CodeGenerator` for more information. + code_generator_class: t.Type["CodeGenerator"] = CodeGenerator + + concat = "".join + + #: the context class that is used for templates. See + #: :class:`~jinja2.runtime.Context` for more information. + context_class: t.Type[Context] = Context + + template_class: t.Type["Template"] + + def __init__( + self, + block_start_string: str = BLOCK_START_STRING, + block_end_string: str = BLOCK_END_STRING, + variable_start_string: str = VARIABLE_START_STRING, + variable_end_string: str = VARIABLE_END_STRING, + comment_start_string: str = COMMENT_START_STRING, + comment_end_string: str = COMMENT_END_STRING, + line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, + line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, + trim_blocks: bool = TRIM_BLOCKS, + lstrip_blocks: bool = LSTRIP_BLOCKS, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, + keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), + optimized: bool = True, + undefined: t.Type[Undefined] = Undefined, + finalize: t.Optional[t.Callable[..., t.Any]] = None, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, + loader: t.Optional["BaseLoader"] = None, + cache_size: int = 400, + auto_reload: bool = True, + bytecode_cache: t.Optional["BytecodeCache"] = None, + enable_async: bool = False, + ): + # !!Important notice!! + # The constructor accepts quite a few arguments that should be + # passed by keyword rather than position. 
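A typical construction using the parameters documented above (the loader path and autoescape policy are illustrative assumptions):

```python
from jinja2 import Environment, FileSystemLoader, select_autoescape

env = Environment(
    loader=FileSystemLoader("templates"),           # hypothetical directory
    autoescape=select_autoescape(["html", "xml"]),
    trim_blocks=True,
    lstrip_blocks=True,
    cache_size=400,    # default; 0 recompiles every time, -1 never evicts
    auto_reload=True,
)
env.policies["truncate.leeway"] = 0   # policies remain plain dict entries
```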
However it's important to + # not change the order of arguments because it's used at least + # internally in those cases: + # - spontaneous environments (i18n extension and Template) + # - unittests + # If parameter changes are required only add parameters at the end + # and don't change the arguments (or the defaults!) of the arguments + # existing already. + + # lexer / parser information + self.block_start_string = block_start_string + self.block_end_string = block_end_string + self.variable_start_string = variable_start_string + self.variable_end_string = variable_end_string + self.comment_start_string = comment_start_string + self.comment_end_string = comment_end_string + self.line_statement_prefix = line_statement_prefix + self.line_comment_prefix = line_comment_prefix + self.trim_blocks = trim_blocks + self.lstrip_blocks = lstrip_blocks + self.newline_sequence = newline_sequence + self.keep_trailing_newline = keep_trailing_newline + + # runtime information + self.undefined: t.Type[Undefined] = undefined + self.optimized = optimized + self.finalize = finalize + self.autoescape = autoescape + + # defaults + self.filters = DEFAULT_FILTERS.copy() + self.tests = DEFAULT_TESTS.copy() + self.globals = DEFAULT_NAMESPACE.copy() + + # set the loader provided + self.loader = loader + self.cache = create_cache(cache_size) + self.bytecode_cache = bytecode_cache + self.auto_reload = auto_reload + + # configurable policies + self.policies = DEFAULT_POLICIES.copy() + + # load extensions + self.extensions = load_extensions(self, extensions) + + self.is_async = enable_async + _environment_config_check(self) + + def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: + """Adds an extension after the environment was created. + + .. versionadded:: 2.5 + """ + self.extensions.update(load_extensions(self, [extension])) + + def extend(self, **attributes: t.Any) -> None: + """Add the items to the instance of the environment if they do not exist + yet. This is used by :ref:`extensions ` to register + callbacks and configuration values without breaking inheritance. + """ + for key, value in attributes.items(): + if not hasattr(self, key): + setattr(self, key, value) + + def overlay( + self, + block_start_string: str = missing, + block_end_string: str = missing, + variable_start_string: str = missing, + variable_end_string: str = missing, + comment_start_string: str = missing, + comment_end_string: str = missing, + line_statement_prefix: t.Optional[str] = missing, + line_comment_prefix: t.Optional[str] = missing, + trim_blocks: bool = missing, + lstrip_blocks: bool = missing, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, + keep_trailing_newline: bool = missing, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, + optimized: bool = missing, + undefined: t.Type[Undefined] = missing, + finalize: t.Optional[t.Callable[..., t.Any]] = missing, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, + loader: t.Optional["BaseLoader"] = missing, + cache_size: int = missing, + auto_reload: bool = missing, + bytecode_cache: t.Optional["BytecodeCache"] = missing, + enable_async: bool = missing, + ) -> "te.Self": + """Create a new overlay environment that shares all the data with the + current environment except for cache and the overridden attributes. + Extensions cannot be removed for an overlayed environment. 
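A usage sketch for the `overlay()` method introduced above (standard Jinja2 assumed):

```python
from jinja2 import Environment

env = Environment(trim_blocks=True)

# An overlay shares filters, tests and globals with its parent but can
# override individual settings; extensions are re-bound rather than removed.
html_env = env.overlay(autoescape=True)

assert html_env.linked_to is env and html_env.overlayed
assert html_env.filters is env.filters     # shared dict, not a copy
assert html_env.trim_blocks and html_env.autoescape
```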
An overlayed + environment automatically gets all the extensions of the environment it + is linked to plus optional extra extensions. + + Creating overlays should happen after the initial environment was set + up completely. Not all attributes are truly linked, some are just + copied over so modifications on the original environment may not shine + through. + + .. versionchanged:: 3.1.5 + ``enable_async`` is applied correctly. + + .. versionchanged:: 3.1.2 + Added the ``newline_sequence``, ``keep_trailing_newline``, + and ``enable_async`` parameters to match ``__init__``. + """ + args = dict(locals()) + del args["self"], args["cache_size"], args["extensions"], args["enable_async"] + + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.overlayed = True + rv.linked_to = self + + for key, value in args.items(): + if value is not missing: + setattr(rv, key, value) + + if cache_size is not missing: + rv.cache = create_cache(cache_size) + else: + rv.cache = copy_cache(self.cache) + + rv.extensions = {} + for key, value in self.extensions.items(): + rv.extensions[key] = value.bind(rv) + if extensions is not missing: + rv.extensions.update(load_extensions(rv, extensions)) + + if enable_async is not missing: + rv.is_async = enable_async + + return _environment_config_check(rv) + + @property + def lexer(self) -> Lexer: + """The lexer for this environment.""" + return get_lexer(self) + + def iter_extensions(self) -> t.Iterator["Extension"]: + """Iterates over the extensions by priority.""" + return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) + + def getitem( + self, obj: t.Any, argument: t.Union[str, t.Any] + ) -> t.Union[t.Any, Undefined]: + """Get an item or attribute of an object but prefer the item.""" + try: + return obj[argument] + except (AttributeError, TypeError, LookupError): + if isinstance(argument, str): + try: + attr = str(argument) + except Exception: + pass + else: + try: + return getattr(obj, attr) + except AttributeError: + pass + return self.undefined(obj=obj, name=argument) + + def getattr(self, obj: t.Any, attribute: str) -> t.Any: + """Get an item or attribute of an object but prefer the attribute. + Unlike :meth:`getitem` the attribute *must* be a string. + """ + try: + return getattr(obj, attribute) + except AttributeError: + pass + try: + return obj[attribute] + except (TypeError, LookupError, AttributeError): + return self.undefined(obj=obj, name=attribute) + + def _filter_test_common( + self, + name: t.Union[str, Undefined], + value: t.Any, + args: t.Optional[t.Sequence[t.Any]], + kwargs: t.Optional[t.Mapping[str, t.Any]], + context: t.Optional[Context], + eval_ctx: t.Optional[EvalContext], + is_filter: bool, + ) -> t.Any: + if is_filter: + env_map = self.filters + type_name = "filter" + else: + env_map = self.tests + type_name = "test" + + func = env_map.get(name) # type: ignore + + if func is None: + msg = f"No {type_name} named {name!r}." + + if isinstance(name, Undefined): + try: + name._fail_with_undefined_error() + except Exception as e: + msg = f"{msg} ({e}; did you forget to quote the callable name?)" + + raise TemplateRuntimeError(msg) + + args = [value, *(args if args is not None else ())] + kwargs = kwargs if kwargs is not None else {} + pass_arg = _PassArg.from_obj(func) + + if pass_arg is _PassArg.context: + if context is None: + raise TemplateRuntimeError( + f"Attempted to invoke a context {type_name} without context." 
+ ) + + args.insert(0, context) + elif pass_arg is _PassArg.eval_context: + if eval_ctx is None: + if context is not None: + eval_ctx = context.eval_ctx + else: + eval_ctx = EvalContext(self) + + args.insert(0, eval_ctx) + elif pass_arg is _PassArg.environment: + args.insert(0, self) + + return func(*args, **kwargs) + + def call_filter( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a filter on a value the same way the compiler does. + + This might return a coroutine if the filter is running from an + environment in async mode and the filter supports async + execution. It's your responsibility to await this if needed. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, True + ) + + def call_test( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a test on a value the same way the compiler does. + + This might return a coroutine if the test is running from an + environment in async mode and the test supports async execution. + It's your responsibility to await this if needed. + + .. versionchanged:: 3.0 + Tests support ``@pass_context``, etc. decorators. Added + the ``context`` and ``eval_ctx`` parameters. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, False + ) + + @internalcode + def parse( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> nodes.Template: + """Parse the sourcecode and return the abstract syntax tree. This + tree of nodes is used by the compiler to convert the template into + executable source- or bytecode. This is useful for debugging or to + extract information from templates. + + If you are :ref:`developing Jinja extensions ` + this gives you a good overview of the node tree generated. + """ + try: + return self._parse(source, name, filename) + except TemplateSyntaxError: + self.handle_exception(source=source) + + def _parse( + self, source: str, name: t.Optional[str], filename: t.Optional[str] + ) -> nodes.Template: + """Internal parsing function used by `parse` and `compile`.""" + return Parser(self, source, name, filename).parse() + + def lex( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> t.Iterator[t.Tuple[int, str, str]]: + """Lex the given sourcecode and return a generator that yields + tokens as tuples in the form ``(lineno, token_type, value)``. + This can be useful for :ref:`extension development ` + and debugging templates. + + This does not perform preprocessing. If you want the preprocessing + of the extensions to be applied you have to filter source through + the :meth:`preprocess` method. + """ + source = str(source) + try: + return self.lexer.tokeniter(source, name, filename) + except TemplateSyntaxError: + self.handle_exception(source=source) + + def preprocess( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> str: + """Preprocesses the source with all extensions. 
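`call_filter` and `call_test` above invoke filters and tests the same way compiled templates do; a short sketch:

```python
from jinja2 import Environment

env = Environment()

print(env.call_filter("upper", "hello"))           # -> HELLO
print(env.call_filter("join", [1, 2, 3], ["-"]))   # -> 1-2-3
print(env.call_test("odd", 3))                     # -> True
```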
This is automatically + called for all parsing and compiling methods but *not* for :meth:`lex` + because there you usually only want the actual source tokenized. + """ + return reduce( + lambda s, e: e.preprocess(s, name, filename), + self.iter_extensions(), + str(source), + ) + + def _tokenize( + self, + source: str, + name: t.Optional[str], + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> TokenStream: + """Called by the parser to do the preprocessing and filtering + for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. + """ + source = self.preprocess(source, name, filename) + stream = self.lexer.tokenize(source, name, filename, state) + + for ext in self.iter_extensions(): + stream = ext.filter_stream(stream) # type: ignore + + if not isinstance(stream, TokenStream): + stream = TokenStream(stream, name, filename) + + return stream + + def _generate( + self, + source: nodes.Template, + name: t.Optional[str], + filename: t.Optional[str], + defer_init: bool = False, + ) -> str: + """Internal hook that can be overridden to hook a different generate + method in. + + .. versionadded:: 2.5 + """ + return generate( # type: ignore + source, + self, + name, + filename, + defer_init=defer_init, + optimized=self.optimized, + ) + + def _compile(self, source: str, filename: str) -> CodeType: + """Internal hook that can be overridden to hook a different compile + method in. + + .. versionadded:: 2.5 + """ + return compile(source, filename, "exec") + + @typing.overload + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[False]" = False, + defer_init: bool = False, + ) -> CodeType: ... + + @typing.overload + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[True]" = ..., + defer_init: bool = False, + ) -> str: ... + + @internalcode + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: bool = False, + defer_init: bool = False, + ) -> t.Union[str, CodeType]: + """Compile a node or template source code. The `name` parameter is + the load name of the template after it was joined using + :meth:`join_path` if necessary, not the filename on the file system. + the `filename` parameter is the estimated filename of the template on + the file system. If the template came from a database or memory this + can be omitted. + + The return value of this method is a python code object. If the `raw` + parameter is `True` the return value will be a string with python + code equivalent to the bytecode returned otherwise. This method is + mainly used internally. + + `defer_init` is use internally to aid the module code generator. This + causes the generated code to be able to import without the global + environment variable to be set. + + .. versionadded:: 2.4 + `defer_init` parameter added. + """ + source_hint = None + try: + if isinstance(source, str): + source_hint = source + source = self._parse(source, name, filename) + source = self._generate(source, name, filename, defer_init=defer_init) + if raw: + return source + if filename is None: + filename = "